diff --git a/cplusplus/common/README_CN.md b/cplusplus/common/README_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..5c342461d52cbebbe34f65883d23ded305015335 --- /dev/null +++ b/cplusplus/common/README_CN.md @@ -0,0 +1,9 @@ +# 公共代码 + +#### 目录结构与说明 + +本目录为公共代码目录,目录结构和具体说明如下。 + +**./** +├── [atlasutil](./atlasutil):**atlasutil库** +└── [presenteragent](./presenteragent):**presenteragent库** \ No newline at end of file diff --git a/cplusplus/common/atlasutil/.ipynb_checkpoints/Makefile-checkpoint b/cplusplus/common/atlasutil/.ipynb_checkpoints/Makefile-checkpoint new file mode 100644 index 0000000000000000000000000000000000000000..0702ea314c6b23c9974ede676859501a5d7272aa --- /dev/null +++ b/cplusplus/common/atlasutil/.ipynb_checkpoints/Makefile-checkpoint @@ -0,0 +1,108 @@ +TOPDIR := $(patsubst %,%,$(CURDIR)) + +ifndef DDK_PATH +$(error "Can not find DDK_PATH env, please set it in environment!.") +endif + +ifeq ($(mode),) +mode=AtlasDK +endif + +ifeq ($(mode), AtlasDK) + CC := aarch64-linux-gnu-g++ + arch := arm +else ifeq ($(mode), ASIC) + CC := g++ + + arch_local=$(shell arch) + ifeq ($(arch_local), aarch64) + arch := arm + else + arch := x86 + endif +else + $(error "Unsupported mode: "$(mode)", please input: AtlasDK or ASIC.") +endif + +NPU_HOST_LIB = $(DDK_PATH)/acllib/lib64/stub/ + +LOCAL_MODULE_NAME := libatlasutil.so + +LOCAL_DIR := . 
+OUT_DIR = ./out/$(arch) +OBJ_DIR = $(OUT_DIR)/obj +DEPS_DIR = $(OUT_DIR)/deps +LOCAL_LIBRARY=$(OUT_DIR)/$(LOCAL_MODULE_NAME) +RPATH_DIR=/home/HwHiAiUser/ascend_ddk/$(arch)/lib + +INC_DIR = \ + -I./include \ + -I$(HOME)/Ascend/driver/ \ + -I$(HOME)/ascend_ddk/$(arch)/include/ \ + -I$(HOME)/ascend_ddk/$(arch)/include/ascenddk/ \ + -I$(HOME)/ascend_ddk/$(arch)/include/ascenddk/presenter/agent/ \ + -I$(DDK_PATH)/acllib/include/ \ + -I$(DDK_PATH)/atc/include/protobuf + +CC_FLAGS := $(INC_DIR) -DENABLE_DVPP_INTERFACE -std=c++11 -fPIC -Wall -O2 +LNK_FLAGS := \ + -Wl,-rpath-link=$(NPU_HOST_LIB) \ + -Wl,-rpath-link=$(HOME)/ascend_ddk/$(arch)/lib \ + -L$(NPU_HOST_LIB) \ + -L$(HOME)/ascend_ddk/$(arch)/lib \ + -lascendcl \ + -lacl_dvpp \ + -lstdc++ \ + -lpthread \ + -lavcodec \ + -lavformat \ + -lavdevice \ + -lavutil \ + -lswresample \ + -lavfilter \ + -lswscale \ + -shared + +ifeq ($(mode), AtlasDK) +LNK_FLAGS += -L$(HOME)/Ascend/driver -lmedia_mini +endif + +SRCS_ALL := $(patsubst $(LOCAL_DIR)/%.cpp, %.cpp, $(shell find $(LOCAL_DIR) -name "*.cpp")) +ifeq ($(mode), AtlasDK) +SRCS := $(SRCS_ALL) +CC_FLAGS += -DENABLE_BOARD_CAMARE +else +SRCS := $(subst src/camera.cpp, ,$(SRCS_ALL)) +endif + +OBJS := $(addprefix $(OBJ_DIR)/, $(patsubst %.cpp, %.o,$(SRCS))) + +ALL_OBJS := $(OBJS) + +all: do_pre_build do_build + +do_pre_build: + $(Q)echo - do [$@] + $(Q)mkdir -p $(OBJ_DIR) + +do_build: $(LOCAL_LIBRARY) | do_pre_build + $(Q)echo - do [$@] + +$(LOCAL_LIBRARY): $(ALL_OBJS) + $(Q)echo [LD] $@ + $(Q)$(CC) $(CC_FLAGS) -o $@ $^ -Wl,--whole-archive -Wl,--no-whole-archive -Wl,--start-group -Wl,--end-group -Wl,-rpath=$(RPATH_DIR) $(LNK_FLAGS) + +$(OBJS): $(OBJ_DIR)/%.o : %.cpp | do_pre_build + $(Q)echo [CC] $@ + $(Q)mkdir -p $(dir $@) + $(Q)$(CC) $(CC_FLAGS) $(INC_DIR) -c -fstack-protector-all $< -o $@ + +install: all + $(Q)echo [INSTALL] $@ + $(Q)mkdir -p $(HOME)/ascend_ddk/$(arch)/include/atlasutil + $(Q)mkdir -p $(HOME)/ascend_ddk/$(arch)/lib + $(Q)cp -R $(LOCAL_DIR)/include/* 
$(HOME)/ascend_ddk/$(arch)/include/atlasutil/ + $(Q)cp -R $(OUT_DIR)/lib*.so $(HOME)/ascend_ddk/$(arch)/lib/ + +clean: + rm -rf $(TOPDIR)/$(OUT_DIR) diff --git a/cplusplus/common/atlasutil/.ipynb_checkpoints/Untitled-checkpoint.ipynb b/cplusplus/common/atlasutil/.ipynb_checkpoints/Untitled-checkpoint.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..363fcab7ed6e9634e198cf5555ceb88932c9a245 --- /dev/null +++ b/cplusplus/common/atlasutil/.ipynb_checkpoints/Untitled-checkpoint.ipynb @@ -0,0 +1,6 @@ +{ + "cells": [], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/cplusplus/common/atlasutil/.vscode/settings.json b/cplusplus/common/atlasutil/.vscode/settings.json new file mode 100644 index 0000000000000000000000000000000000000000..f94ace26024057e4ebdd5a216e31d7d36bb2340b --- /dev/null +++ b/cplusplus/common/atlasutil/.vscode/settings.json @@ -0,0 +1,55 @@ +{ + "files.associations": { + "array": "cpp", + "bitset": "cpp", + "string_view": "cpp", + "initializer_list": "cpp", + "regex": "cpp", + "utility": "cpp", + "atomic": "cpp", + "*.tcc": "cpp", + "cctype": "cpp", + "chrono": "cpp", + "clocale": "cpp", + "cmath": "cpp", + "condition_variable": "cpp", + "cstdarg": "cpp", + "cstddef": "cpp", + "cstdint": "cpp", + "cstdio": "cpp", + "cstdlib": "cpp", + "cstring": "cpp", + "ctime": "cpp", + "cwchar": "cpp", + "cwctype": "cpp", + "deque": "cpp", + "list": "cpp", + "unordered_map": "cpp", + "vector": "cpp", + "exception": "cpp", + "algorithm": "cpp", + "iterator": "cpp", + "map": "cpp", + "memory": "cpp", + "memory_resource": "cpp", + "optional": "cpp", + "ratio": "cpp", + "string": "cpp", + "system_error": "cpp", + "tuple": "cpp", + "type_traits": "cpp", + "fstream": "cpp", + "iosfwd": "cpp", + "iostream": "cpp", + "istream": "cpp", + "limits": "cpp", + "mutex": "cpp", + "new": "cpp", + "ostream": "cpp", + "sstream": "cpp", + "stdexcept": "cpp", + "streambuf": "cpp", + "thread": "cpp", + "typeinfo": "cpp" + } +} \ No newline 
at end of file diff --git a/cplusplus/common/atlasutil/Makefile b/cplusplus/common/atlasutil/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..0702ea314c6b23c9974ede676859501a5d7272aa --- /dev/null +++ b/cplusplus/common/atlasutil/Makefile @@ -0,0 +1,108 @@ +TOPDIR := $(patsubst %,%,$(CURDIR)) + +ifndef DDK_PATH +$(error "Can not find DDK_PATH env, please set it in environment!.") +endif + +ifeq ($(mode),) +mode=AtlasDK +endif + +ifeq ($(mode), AtlasDK) + CC := aarch64-linux-gnu-g++ + arch := arm +else ifeq ($(mode), ASIC) + CC := g++ + + arch_local=$(shell arch) + ifeq ($(arch_local), aarch64) + arch := arm + else + arch := x86 + endif +else + $(error "Unsupported mode: "$(mode)", please input: AtlasDK or ASIC.") +endif + +NPU_HOST_LIB = $(DDK_PATH)/acllib/lib64/stub/ + +LOCAL_MODULE_NAME := libatlasutil.so + +LOCAL_DIR := . +OUT_DIR = ./out/$(arch) +OBJ_DIR = $(OUT_DIR)/obj +DEPS_DIR = $(OUT_DIR)/deps +LOCAL_LIBRARY=$(OUT_DIR)/$(LOCAL_MODULE_NAME) +RPATH_DIR=/home/HwHiAiUser/ascend_ddk/$(arch)/lib + +INC_DIR = \ + -I./include \ + -I$(HOME)/Ascend/driver/ \ + -I$(HOME)/ascend_ddk/$(arch)/include/ \ + -I$(HOME)/ascend_ddk/$(arch)/include/ascenddk/ \ + -I$(HOME)/ascend_ddk/$(arch)/include/ascenddk/presenter/agent/ \ + -I$(DDK_PATH)/acllib/include/ \ + -I$(DDK_PATH)/atc/include/protobuf + +CC_FLAGS := $(INC_DIR) -DENABLE_DVPP_INTERFACE -std=c++11 -fPIC -Wall -O2 +LNK_FLAGS := \ + -Wl,-rpath-link=$(NPU_HOST_LIB) \ + -Wl,-rpath-link=$(HOME)/ascend_ddk/$(arch)/lib \ + -L$(NPU_HOST_LIB) \ + -L$(HOME)/ascend_ddk/$(arch)/lib \ + -lascendcl \ + -lacl_dvpp \ + -lstdc++ \ + -lpthread \ + -lavcodec \ + -lavformat \ + -lavdevice \ + -lavutil \ + -lswresample \ + -lavfilter \ + -lswscale \ + -shared + +ifeq ($(mode), AtlasDK) +LNK_FLAGS += -L$(HOME)/Ascend/driver -lmedia_mini +endif + +SRCS_ALL := $(patsubst $(LOCAL_DIR)/%.cpp, %.cpp, $(shell find $(LOCAL_DIR) -name "*.cpp")) +ifeq ($(mode), AtlasDK) +SRCS := $(SRCS_ALL) +CC_FLAGS += 
-DENABLE_BOARD_CAMARE +else +SRCS := $(subst src/camera.cpp, ,$(SRCS_ALL)) +endif + +OBJS := $(addprefix $(OBJ_DIR)/, $(patsubst %.cpp, %.o,$(SRCS))) + +ALL_OBJS := $(OBJS) + +all: do_pre_build do_build + +do_pre_build: + $(Q)echo - do [$@] + $(Q)mkdir -p $(OBJ_DIR) + +do_build: $(LOCAL_LIBRARY) | do_pre_build + $(Q)echo - do [$@] + +$(LOCAL_LIBRARY): $(ALL_OBJS) + $(Q)echo [LD] $@ + $(Q)$(CC) $(CC_FLAGS) -o $@ $^ -Wl,--whole-archive -Wl,--no-whole-archive -Wl,--start-group -Wl,--end-group -Wl,-rpath=$(RPATH_DIR) $(LNK_FLAGS) + +$(OBJS): $(OBJ_DIR)/%.o : %.cpp | do_pre_build + $(Q)echo [CC] $@ + $(Q)mkdir -p $(dir $@) + $(Q)$(CC) $(CC_FLAGS) $(INC_DIR) -c -fstack-protector-all $< -o $@ + +install: all + $(Q)echo [INSTALL] $@ + $(Q)mkdir -p $(HOME)/ascend_ddk/$(arch)/include/atlasutil + $(Q)mkdir -p $(HOME)/ascend_ddk/$(arch)/lib + $(Q)cp -R $(LOCAL_DIR)/include/* $(HOME)/ascend_ddk/$(arch)/include/atlasutil/ + $(Q)cp -R $(OUT_DIR)/lib*.so $(HOME)/ascend_ddk/$(arch)/lib/ + +clean: + rm -rf $(TOPDIR)/$(OUT_DIR) diff --git a/cplusplus/common/atlasutil/README.md b/cplusplus/common/atlasutil/README.md new file mode 100644 index 0000000000000000000000000000000000000000..45a75600473d0dc16cdba85c614a021ab6637694 --- /dev/null +++ b/cplusplus/common/atlasutil/README.md @@ -0,0 +1,618 @@ +# atlasutil库使用说明 + +atlasutil库对当前开源社区样例中 + +1.Atlas200DK板载摄像头 + +2.acl dvpp图像和视频处理 + +3.acl模型推理等进行封装 + +等重复代码进行封装,提供一组公共接口。 + +注意: + +1.本库仅供当前社区开源样例使用,不覆盖ascend平台应用开发的所有场景,不作为用户应用开发的标准库; + +2.仅支持Atlas200DK和Atlas300样例。 + +## 编译方法 + +### 第三方库依赖 + +1. 依赖acl库,使用前需要安装ascend开发环境 + +2. 
视频解码使用ffmpeg+dvpp,依赖ffmpeg库。ffmpeg的编译和部署参见[环境准备和依赖安装](../../environment) + +注意:开发环境是指编译应用代码的服务器;运行环境是指运行应用的Atlas200DK开发板或者Atals300云服务器;两者可以在同一个硬件设备上,也可以是分离的 + +### 编译步骤 + +1.进入src目录; + +2.执行编译安装命令。 + +Atlas200DK: + +``` +make +make install +``` + +编译生成的libatalsutil.so将被拷贝到`$HOME/ascend_ddk/arm/lib/`下;头文件拷贝到`$HOME/ascend_ddk/arm/include/atlasutil` + +Atlas300: + +``` +make mode=ASIC +make mode=ASIC install +``` + +编译生成的libatalsutil.so将被拷贝到`$HOME/ascend_ddk/x86/lib/`下;头文件拷贝到`$HOME/ascend_ddk/x86/include/atlasutil` + +## 部署方法 + +1. 将libatlasutil.so拷贝到运行环境/home/HwHiAiUser/ascend_ddk/arm/lib(Atlas200DK设备),或者/home/HwHiAiUser/ascend_ddk/x86/lib(Atlas300设备)下目录; + +2. 在运行环境下切换到 root用户,打开/etc/ld.so.conf.d/mind_so.conf ,将该目录追加到文件末尾,保存后退出,然后执行命令ldconfig + +## 接口说明 + +### AtlasCapture类 + +AtlasCapture类为Atlas200DK板载摄像头、RTSP视频流、mp4文件和H264/H265裸流文件解码提供统一接口。 + +#### AtlasVideoCapture + +方法:AtlasVideoCapture(uint32_t width = 1280, uint32_t height = 720, uint32_t fps = 20) + +说明:在Atlas200DK上打开一个可用的摄像头。如果0槽位摄像头可用,则选择0槽位;否则选择槽位1;如果两个摄像头都不可用,只生成实例,不会打开任何摄 像头。 + +输入参数:width:摄像头分辨率宽 + +​ height:摄像头分辨率高 + +​ fps:帧率, 参数范围为[1, 20] + +返回值:无 + +约束:1. 只支持atlas200dk; + +​ 2. 摄像头默认分辨率参数设置需要符合驱动要求,当前支持5种分辨率:1920 x 1080,1280 x 720,704 x 576,704 x 288,352 x 288。 + +#### AtlasVideoCapture + +方法: AtlasVideoCapture(uint32_t cameraId, uint32_t width = 1280, uint32_t height = 720, uint32_t fps = 20) + +说明: 在Atlas200DK上打开指定槽位的摄像头。如果该摄像头不可用,只生成实例,不会打开摄像头 + +输入参数: cameraId:摄像头id,0 表示CAMERA0槽位的摄像头,1 表示CAMERA1槽位的摄像头 + +​ width:摄像头分辨率宽 + +​ height:摄像头分辨率高 + +​ fps:帧率, 参数范围为[1, 20] + +返回值:无 + +约束: 1. 只支持atlas200dk; + +​ 2. 
摄像头默认分辨率参数设置需要符合驱动要求,当前支持5种分辨率:1920 x 1080,1280 x 720,704 x 576,704 x 288,352 x 288。 + +#### AtlasVideoCapture + +方法:AtlasVideoCapture(const string& videoPath, aclrtContext context = nullptr) + +说明:解码视频videoPath + +输入参数:videoPath:视频文件或者rtsp地址; + +​ context:解码器使用dvpp vdec解码时使用的acl context。默认情况下使用当前线程的context + +返回值:无 + +约束: 解码器使用ffmpeg+vdec解码视频,在创建实例前需要初始化acl(aclInit)和设置device(aclrtSetDevice) + +注意事项:无 + +#### IsOpened + +方法: bool IsOpened() + +说明: 判断摄像头或者视频流是否已经打开 + +输入参数: 无 + +返回值: true: 已经打开摄像头,或者可以解码视频流; + +​ false: 摄像头不可用,或者视频流无法解码。 + +约束: 无 + +注意: 无 + +#### Get + +方法: uint32_t Get(StreamProperty key) + +说明: 获取摄像头或者视频解码属性 + +输入参数: key属性,定义如下 + +​ enum StreamProperty { + +​ FRAME_WIDTH = 1, ///视频分辨率宽 + +​ FRAME_HEIGHT = 2,//视频分辨率高 + +​ VIDEO_FPS = 3,//频解码帧率 + +​ ... + +​ }; + +返回值: 属性值 + +约束: 只有视频流解码支持Get获取属性; + +注意: 无 + +#### Set + +方法: AtlasError Set(StreamProperty key, uint32_t value) + +说明: 设置解码属性 + +输入参数: 1.key属性: + +​ enum StreamProperty { + +​ ...... + +​ OUTPUT_IMAGE_FORMAT = 4,//输出图像格式 + +​ RTSP_TRANSPORT = 5 //rtsp传输方式 +​ }; + +​ 2.value 属性值。图像格式支持PIXEL_FORMAT_YUV_SEMIPLANAR_420和PIXEL_FORMAT_YVU_SEMIPLANAR_420; + +​ 传输属性支持RTSP_TRANS_UDP(值为0)和RTSP_TRANS_TCP(值为1) + +返回值: 是否成功 + +约束: 1.只有视频流支持Set设置属性; + +​ 2.Mp4文件和rtsp都支持设置输出图像格式; + +​ 3.只有RTSP流支持设置传输方式 + +注意: 无 + +#### Read + +方法: AtlasError Read(ImageData& frame) + +说明: 读取摄像头或者视频流解码后的一帧视频图像 + +输入: Frame:视频图像数据和属性 + +返回值: ATLAS_OK:读取成功 + +其他: 读取失败。当前解码异常或者已经解码完毕 + +约束说明: 获取的数据内存为DVPP内存,因为内存不能在不同的context间传递,所以创建解码器时传入的context和调用Read接口线程的context必须相同,否则图像数据不可用 + +注意事项: 果返回值不为ATLAS_OK,表示当前解码失败建议不再读取 + +#### Close + +方法: AtlasError Close() + +说明: 停止解码或者关闭摄像头 + +输入参数: 无 + +返回值: ATLAS_OK:关闭/停止成功 + +其他: 关闭/停止失败 + +约束: 无 + +注意事项 + +### DvppProcess类 + +#### DvppProcess + +方法: DvppProcess() + +说明: 创建实例 + +输入参数: 无 + +返回值: 无 + +约束: 无 + +注意事项 + +#### InitResource + +方法: AtlasError InitResource(aclrtStream& stream) + +说明: DvppProcess初始化 + +输入参数: Stream: DvppProcess实例使用的acl stream + +返回值: ATLAS_OK:初始化成功 + +​ 其他: 初始化失败 + 
+约束: 无 + +注意: 无 + +#### Resize + +方法: AtlasError Resize(ImageData& dest, ImageData& src, uint32_t width, uint32_t height) + +说明: 将图片缩放到指定大小 + +输入参数: dest: resize后的图片数据,格式为Yuv420sp NV12, 图片数据存放在dvpp内存中 + +​ src: 待压缩图片,尺寸满足dvpp对齐要求 + +​ width: 缩放目标大小的宽度 + +​ height: 缩放目标大小的高度 + +返回值: ATLAS_OK: 缩放成功 + +​ 其他: 缩放失败 + +约束: 输入图片内存为dvpp, 并且满足16x2对齐 + +注意事项: acl dvpp在缩放图片时,输出图片是16x2对齐的,所以缩放结果图片不一定和接口参数一致,例如将图片缩放参数是300x300,得到的图片将是304x300 + +#### JpegD + +方法: AtlasError JpegD(ImageData& destYuv, ImageData& srcJpeg) + +说明: 将jpeg图片解码为yuv420sp图片 + +输入参数: destYuv: 解码后的yuv图片 + +​ srcJpeg: 待解码的jpeg图片 + +返回值: ATLAS_OK:解码成功 + +其他: 解码失败 + +约束: 输入图片内存为dvpp,并满足dvpp jpegd约束 + +注意: 输出yuv图片满足16x2对齐,不一定和输入图片尺寸一致 + +#### JpegE + + 方法: AtlasError JpegE(ImageData& destJpeg, ImageData& srcYuv) + +说明: 将yuv图片编码为jpeg图片 + +输入参数: destJpeg:生成的jpeg图片 + +​ SrcYuv:输入的yuv 图片 + +返回值: ATLAS_OK:编码成功 + +其他: 编码失败 + +约束: 无 + +注意事项: 无 + +### AtlasModel类 + +#### AtlasModel + +方法: AtlasModel(const string& modelPath) + +说明: Acl model的封装 + +输入参数: modelPath 离线模型路径 + +返回值: 无 + +约束: 无 + +注意事项: 无 + +#### Init方法 + +方法: AtlasError Init() + +说明: AtlasModel初始化 + +输入参数: 无 + +返回值说明: ATLAS_OK: 初始化成功 + +​ 其他: 初始化失败 + +约束: 无 + +注意事项: 无 + +#### CreateInput + +方法: AtlasError CreateInput(void *input, uint32_t inputSize) + +说明: 创建模型推理输入 + +输入参数: input: 推理数据 + +​ inputSize: 数据大小 + +返回值: ATLAS_OK:创建成功 + +​ 其他: 创建失败 + +约束: 输入数据input在device或者dvpp内存中 + +注意事项: 无 + +#### CreateInput + +方法: AtlasError CreateInput(void* input1, uint32_t input1Size, void* input2, uint32_t input2Size) + +说明: 创建模型推理输入 + +输入参数: input1: 第一个输入 + +​ input1Size: 第一个输入数据大小 + +​ input2: 第二个输入 + +​ input2Size: 第二个输入大小 + +返回值: ATLAS_OK: 创建成功 + +​ 其他: 创建失败 + +约束: 要求输入数据在device或者dvpp内存中 + +注意事项: 无 + +#### CreateInput + +方法: AtlasError CreateInput(vector& inputData) + +说明: 创建模型推理输入。用于输入有多个的模型。 + +输入参数: inputData:输入数据列表 + +​ struct DataInfo { + +​ void* data; + +​ uint32_t size; + +​ }; + +返回值: ATLAS_OK: 创建成功 + +​ 其他: 创建失败 + +约束: 要求数据在device或者dvpp内存中 + +注意事项: 无 + +#### 
DestroyInput + +方法: void DestroyInput() + +说明: 销毁创建的模型输入 + +输入参数: 无 + +返回值: 无 + +约束: 无 + +注意事项: 只释放CreateInput创建的dataset结构,不会释放输入的数据 + +#### Execute方法 + +方法: AtlasError Execute(vector& inferOutputs); + +说明: 执行模型推理 + +输入参数: inferOutputs 输出参数,推理结果: + +​ struct InferenceOutput { + +​ shared_ptr data = nullptr; + +​ uint32_t size; + +​ }; + +返回值: ATLAS_OK:推理成功 + +​ 其他: 推理成功 + +约束: 无 + +注意事项: 无 + +### 日志 + +#### ATLAS_LOG_ERROR + +方法: ATLAS_LOG_ERROR(fmt, ...) + +说明: 打印acl ERROR级别日志到/var/log/npu/slog/host-0/host-xxxx.log + +输入参数: fmt:格式化字符串 + +返回值: 无 + +约束: 无 + +注意事项: 无 + +#### ATLAS_LOG_INFO + +方法: ATLAS_LOG_INFO(fmt, ...) + +说明: 打印acl INFO级别日志到/var/log/npu/slog/host-0/host-xxxx.log + +输入参数: log_info:打印的日志内容,支持格式化字符串,记录的日志包括函数、文件和行号 + +返回值: 无 + +约束: 无 + +注意事项: 无 + +#### ATLAS_LOG_DEBUG + +方法: ATLAS_LOG_DEBUG(fmt, ...) + +说明: 打印acl DEBUG级别日志到/var/log/npu/slog/host-0/host-xxxx.log + +输入参数: log_info:打印的日志内容,支持格式化字符串,记录的日志包括函数、文件和行号 + +返回值: 无 + +约束: 无 + +注意事项: 无 + +### 其他接口 + +#### ReadConfig + +方法: bool ReadConfig(map& config, const char* configFile) + +说明: 解析配置文件 + +输入参数: config:解析结果 + +​ configFile:配置文件路径 + +返回值: true:解析成功 + +​ false: 解析失败 + +约束: 类似如下的配置文件,要求配置项名称唯一: + +​ [baseconf] + +​ presenter_server_ip=192.168.1.166 + +注意事项: 无 + +#### CopyDataToHost + +方法: void* CopyDataToHost(void* data, uint32_t size, aclrtRunMode curRunMode, MemoryType memType) + +说明: 数据拷贝到host侧 + +输入参数: data:待拷贝数据 + +​ size: 数据大小 + +​ curRunMode:当前的runMode + +​ memType:拷贝目的内存种类 + +​ enum MemoryType { + +​ MEMORY_NORMAL = 0,//使用new申请的uint8_t类型内存 + +​ MEMORY_HOST,//acl接口申请的host内存 + +​ MEMORY_DEVICE,//acl接口申请的device内存 + +​ MEMORY_DVPP, //dvpp内存 + +​ MEMORY_INVALID_TYPE + +​ }; + +返回值: 拷贝后的目的内存指针 + +约束: 不支持Atlas300dk服务器本地内存之间的拷贝 + +注意事项: 无 + +#### CopyDataToDevice + +方法: void* CopyDataToDevice(void* data, uint32_t size, aclrtRunMode curRunMode, MemoryType memType) + +说明: 数据拷贝到device侧 + +输入参数: data:待拷贝数据 + +​ size: 数据大小 + +​ curRunMode:当前的runMode + +​ memType:拷贝目的内存种类 + +​ enum MemoryType { + +​ 
MEMORY_NORMAL = 0,//使用new申请的uint8_t类型内存 + +​ MEMORY_HOST,//acl接口申请的host内存 + +​ MEMORY_DEVICE,//acl接口申请的device内存 + +​ MEMORY_DVPP, //dvpp内存 + +​ MEMORY_INVALID_TYPE + +​ }; + +返回值: 拷贝后的目的内存指针 + +约束: 无 + +注意事项: 无 + +#### SaveBinFile + +方法: void SaveBinFile(const char* filename, void* data, uint32_t size) + +说明: 将数据保存二进制文件 + +输入参数: filename:带路径的二进制文件名 + +​ Data:二进制数据 + +​ size:数据大小 + +返回值: 拷贝后的目的内存指针 + +约束: 无 + +注意事项: 无 + +#### ReadBinFile + +方法: AtlasError ReadBinFile(const char* filename, void*& data, uint32_t& size) + +说明: 将数据保存二进制文件 + +输入: filename:带路径的二进制文件名 + +​ Data:读取的二进制数据 + +​ size:数据大小 + +返回值: 拷贝后的目的内存指针 + +约束: 无 + +注意事项: 无 \ No newline at end of file diff --git a/cplusplus/common/atlasutil/Untitled.ipynb b/cplusplus/common/atlasutil/Untitled.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..363fcab7ed6e9634e198cf5555ceb88932c9a245 --- /dev/null +++ b/cplusplus/common/atlasutil/Untitled.ipynb @@ -0,0 +1,6 @@ +{ + "cells": [], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/cplusplus/common/atlasutil/include/acl_device.h b/cplusplus/common/atlasutil/include/acl_device.h new file mode 100644 index 0000000000000000000000000000000000000000..26146376c99646fa242dd45f516d69270cb16e8a --- /dev/null +++ b/cplusplus/common/atlasutil/include/acl_device.h @@ -0,0 +1,47 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+ +* File utils.h +* Description: handle file operations +*/ +#pragma once + +#include +#include + +#include "acl/acl.h" +#include "atlas_error.h" + +class AclDevice { +public: + AclDevice(); + AclDevice(int32_t devId, const std::string& aclConfigPath, + bool useDefaultCtx = true); + ~AclDevice(); + + AtlasError Init(); + void Release(); + + aclrtRunMode GetRunMode() { return runMode_; } + aclrtContext GetContext() { return context_; } + +private: + int32_t deviceId_; + std::string aclConfig_; + aclrtRunMode runMode_; + aclrtContext context_; + bool useDefaultCtx_; + bool isReleased_; +}; diff --git a/cplusplus/common/atlasutil/include/atlas_app.h b/cplusplus/common/atlasutil/include/atlas_app.h new file mode 100644 index 0000000000000000000000000000000000000000..9d05058b5d648e0ae40e48711e4a861b8e9c3cb9 --- /dev/null +++ b/cplusplus/common/atlasutil/include/atlas_app.h @@ -0,0 +1,77 @@ +/** +* @file sample_process.h +* +* Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. +* +* This program is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
+*/ +#pragma once + +#include "acl/acl.h" +#include "atlas_thread_mgr.h" + +namespace { + int kMainThreadId = 0; +} + +typedef int (*AtlasMsgProcess)(uint32_t msgId, shared_ptr msgData, void* userData); + +class AtlasApp { +public: + /** + * @brief Constructor + */ + AtlasApp(); + AtlasApp(const AtlasApp&) = delete; + AtlasApp& operator=(const AtlasApp&) = delete; + + /** + * @brief Destructor + */ + ~AtlasApp(); + + /** + * @brief Get the single instance of AtlasApp + * @return Instance of AtlasApp + */ + static AtlasApp& GetInstance() { + static AtlasApp instance; + return instance; + } + + /** + * @brief Create one app thread * + * @return Result of create thread + */ + int CreateAtlasThread(AtlasThread* thInst, const std::string& instName, + aclrtContext context, aclrtRunMode runMode); + int Start(vector& threadParamTbl); + void Wait(); + void Wait(AtlasMsgProcess msgProcess, void* param); + int GetAtlasThreadIdByName(const std::string& threadName); + AtlasError SendMessage(int dest, int msgId, shared_ptr data); + void WaitEnd() { waitEnd_ = true; } + void Exit(); + +private: + AtlasError Init(); + int CreateAtlasThreadMgr(AtlasThread* thInst, const std::string& instName, + aclrtContext context, aclrtRunMode runMode); + bool CheckThreadAbnormal(); + bool CheckThreadNameUnique(const std::string& theadName); + void ReleaseThreads(); + void DestroyResource(); + +private: + bool isReleased_; + bool waitEnd_; + std::vector threadList_; +}; + +AtlasApp& CreateAtlasAppInstance(); +AtlasApp& GetAtlasAppInstance(); +AtlasError SendMessage(int dest, int msgId, shared_ptr data); +int GetAtlasThreadIdByName(const std::string& threadName); + diff --git a/cplusplus/common/atlasutil/include/atlas_error.h b/cplusplus/common/atlasutil/include/atlas_error.h new file mode 100644 index 0000000000000000000000000000000000000000..ed9608d0b57d43fbccc498d9dbdbaac9804505d0 --- /dev/null +++ b/cplusplus/common/atlasutil/include/atlas_error.h @@ -0,0 +1,198 @@ +/** +* Copyright 2020 
Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. + +* File utils.h +* Description: handle file operations +*/ +#pragma once + +#include + +typedef int AtlasError; + +const int ATLAS_OK = 0; +const int ATLAS_ERROR = 1; +const int ATLAS_ERROR_INVALID_ARGS = 2; +const int ATLAS_ERROR_SET_ACL_CONTEXT = 3; +const int ATLAS_ERROR_GET_ACL_CONTEXT = 4; +const int ATLAS_ERROR_CREATE_ACL_CONTEXT = 5; +const int ATLAS_ERROR_CREATE_THREAD = 6; +const int ATLAS_ERROR_CREATE_STREAM = 7; +const int ATLAS_ERROR_GET_RUM_MODE = 8; +const int ATLAS_ERROR_APP_INIT = 9; +const int ATLAS_ERROR_DEST_INVALID = 10; +const int ATLAS_ERROR_INITED_ALREADY = 11; +const int ATLAS_ERROR_ENQUEUE = 12; +const int ATLAS_ERROR_WRITE_FILE = 13; +const int ATLAS_ERROR_THREAD_ABNORMAL = 14; +const int ATLAS_ERROR_START_THREAD = 15; +const int ATLAS_ERROR_ADD_THREAD = 16; + +//malloc or new memory failed +const int ATLAS_ERROR_MALLOC = 101; +//aclrtMalloc failed +const int ATLAS_ERROR_MALLOC_DEVICE = 102; + +const int ATLAS_ERROR_MALLOC_DVPP = 103; + +//access file failed +const int ATLAS_ERROR_ACCESS_FILE = 201; +//the file is invalid +const int ATLAS_ERROR_INVALID_FILE = 202; +//open file failed +const int ATLAS_ERROR_OPEN_FILE = 203; + +//load model repeated +const int ATLAS_ERROR_LOAD_MODEL_REPEATED = 301; + +const int ATLAS_ERROR_NO_MODEL_DESC = 302; +//load mode by acl failed +const int ATLAS_ERROR_LOAD_MODEL = 303; + +const int ATLAS_ERROR_CREATE_MODEL_DESC = 304; + +const 
int ATLAS_ERROR_GET_MODEL_DESC = 305; + +const int ATLAS_ERROR_CREATE_DATASET = 306; + +const int ATLAS_ERROR_CREATE_DATA_BUFFER = 307; + +const int ATLAS_ERROR_ADD_DATASET_BUFFER = 308; + +const int ATLAS_ERROR_EXECUTE_MODEL = 309; + +const int ATLAS_ERROR_GET_DATASET_BUFFER = 310; + +const int ATLAS_ERROR_GET_DATA_BUFFER_ADDR = 311; + +const int ATLAS_ERROR_GET_DATA_BUFFER_SIZE = 312; + +const int ATLAS_ERROR_COPY_DATA = 313; + +const int ATLAS_ERROR_SET_CAMERA = 400; + +const int ATLAS_ERROR_CAMERA_NO_ACCESSABLE = 401; + +const int ATLAS_ERROR_OPEN_CAMERA = 402; + +const int ATLAS_ERROR_READ_CAMERA_FRAME = 403; + +const int ATLAS_ERROR_UNSURPPORT_PROPERTY = 404; + +const int ATLAS_ERROR_INVALID_PROPERTY_VALUE = 405; + +const int ATLAS_ERROR_UNSURPPORT_VIDEO_CAPTURE =406; + +const int ATLAS_ERROR_CREATE_DVPP_CHANNEL_DESC = 501; + +const int ATLAS_ERRROR_CREATE_DVPP_CHANNEL = 502; + +const int ATLAS_ERROR_CREATE_PIC_DESC = 503; + +const int ATLAS_ERROR_CREATE_RESIZE_CONFIG = 504; + +const int ATLAS_ERROR_RESIZE_ASYNC = 505; + +const int ATLAS_ERROR_SYNC_STREAM = 506; + +const int ATLAS_ERROR_JPEGE_ASYNC = 507; + +const int ATLAS_ERROR_JPEGD_ASYNC = 508; + +const int ATLAS_ERROR_FFMPEG_DECODER_INIT = 601; + +const int ATLAS_ERROR_OPEN_VIDEO_UNREADY = 602; + +const int ATLAS_ERROR_TOO_MANY_VIDEO_DECODERS = 603; + +const int ATLAS_ERROR_SET_VDEC_CHANNEL_ID = 604; + +const int ATLAS_ERROR_SET_STREAM_DESC_DATA = 605; + +const int ATLAS_ERROR_SET_VDEC_CHANNEL_THREAD_ID = 606; + +const int ATLAS_ERROR_SET_VDEC_CALLBACK = 607; + +const int ATLAS_ERROR_SET_VDEC_ENTYPE = 608; + +const int ATLAS_ERROR_SET_VDEC_PIC_FORMAT = 609; + +const int ATLAS_ERROR_CREATE_VDEC_CHANNEL = 610; + +const int ATLAS_ERROR_CREATE_STREAM_DESC = 611; + +const int ATLAS_ERROR_SET_STREAM_DESC_EOS = 612; + +const int ATLAS_ERROR_SET_STREAM_DESC_SIZE = 613; + +const int ATLAS_ERROR_SET_PIC_DESC_DATA = 614; + +const int ATLAS_ERROR_SET_PIC_DESC_SIZE = 615; + +const int ATLAS_ERROR_SET_PIC_DESC_FORMAT 
= 616; + +const int ATLAS_ERROR_VDEC_IS_EXITTING = 617; + +const int ATLAS_ERROR_VDEC_SET_WIDTH = 618; + +const int ATLAS_ERROR_VDEC_WIDTH_INVALID = 619; + +const int ATLAS_ERROR_VDEC_HEIGHT_INVALID = 620; + +const int ATLAS_ERROR_VDEC_SET_HEIGHT = 621; + +const int ATLAS_ERROR_VDEC_ENTYPE_INVALID = 622; + +const int ATLAS_ERROR_VDEC_FORMAT_INVALID = 623; + +const int ATLAS_ERROR_VDEC_INVALID_PARAM = 624; + +const int ATLAS_ERROR_VDEC_SEND_FRAME = 625; + +const int ATLAS_ERROR_VDEC_QUEUE_FULL = 626; + +const int ATLAS_ERROR_SET_RTSP_TRANS = 627; + +const int ATLAS_ERROR_READ_EMPTY = 628; + +const int ATLAS_ERROR_VIDEO_DECODER_STATUS = 629; + +const int ATLAS_ERROR_DECODE_FINISH = 630; + +const int ATLAS_ERROR_H26X_FRAME = 631; + +const int ATLAS_ERROR_VENC_STATUS = 701; + +const int ATLAS_ERROR_VENC_QUEUE_FULL = 702; + +const int ATLAS_ERROR_CREATE_VENC_CHAN_DESC = 703; + +const int ATLAS_ERROR_SET_VENC_CHAN_TID = 704; + +const int ATLAS_ERROR_VENC_SET_EOS = 705; + +const int ATLAS_ERROR_VENC_SET_IF_FRAME = 706; + +const int ATLAS_ERROR_CREATE_VENC_CHAN = 707; + +const int ATLAS_ERROR_VENC_CREATE_FRAME_CONFIG = 708; + +const int ATLAS_ERROR_VENC_SEND_FRAME = 709; + +const int ATLAS_ERROR_SUBSCRIBE_REPORT = 710; + + + + diff --git a/cplusplus/common/atlasutil/include/atlas_model.h b/cplusplus/common/atlasutil/include/atlas_model.h new file mode 100644 index 0000000000000000000000000000000000000000..b99938f7f689ec7f1a0e7d4f799340428b16c1e9 --- /dev/null +++ b/cplusplus/common/atlasutil/include/atlas_model.h @@ -0,0 +1,71 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. 
+* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. + +* File model_process.h +* Description: handle model process +*/ +#pragma once +#include +#include "atlas_utils.h" +#include "acl/acl.h" + +class AtlasModel { +public: + AtlasModel(); + AtlasModel(const std::string& modelPath); + + ~AtlasModel(); + + AtlasError Init(); + AtlasError Init(const std::string& modelPath); + + void DestroyResource(); + AtlasError CreateInput(void *input, uint32_t input1size); + AtlasError CreateInput(void *input1, uint32_t input1size, + void* input2, uint32_t input2size); + AtlasError CreateInput(std::vector& inputData); + AtlasError Execute(std::vector& inferOutputs, + void *data, uint32_t size); + AtlasError Execute(std::vector& inferOutputs); + void DestroyInput(); + +private: + AtlasError LoadModelFromFile(const std::string& modelPath); + AtlasError CreateDesc(); + AtlasError CreateOutput(); + AtlasError AddDatasetBuffer(aclmdlDataset* dataset, + void* buffer, uint32_t bufferSize); + AtlasError GetOutputItem(InferenceOutput& out, + uint32_t idx); + void Unload(); + void DestroyDesc(); + void DestroyOutput(); + +private: + std::string modelPath_; + bool loadFlag_; // model load flag + uint32_t modelId_; + void *modelMemPtr_; + size_t modelMemSize_; + void *modelWeightPtr_; + size_t modelWeightSize_; + aclmdlDesc *modelDesc_; + aclmdlDataset *input_; + aclmdlDataset *output_; + aclrtRunMode runMode_; + size_t outputsNum_; + bool isReleased_; +}; + diff --git a/cplusplus/common/atlasutil/include/atlas_thread.h b/cplusplus/common/atlasutil/include/atlas_thread.h new file mode 100644 index 
0000000000000000000000000000000000000000..3e7cd499c2acfc6f880d8f92a464a9c34e7a76ea --- /dev/null +++ b/cplusplus/common/atlasutil/include/atlas_thread.h @@ -0,0 +1,49 @@ +#pragma once +#include +#include +#include +#include +#include "thread_safe_queue.h" +#include "acl/acl.h" +#include "atlas_error.h" + +using namespace std; + +#define INVALID_INSTANCE_ID (-1) + +class AtlasThread{ +public: + // 构造函数 + AtlasThread(); + ~AtlasThread() {}; + virtual int Init() { return ATLAS_OK; }; + virtual int Process(int msgId, shared_ptr msgData) = 0; + + int SelfInstanceId() { return instanceId_; } + string& SelfInstanceName() { return instanceName_; } + aclrtContext GetContext() { return context_; } + aclrtRunMode GetRunMode() { return runMode_; } + + AtlasError BaseConfig(int instanceId, const string& threadName, + aclrtContext context, aclrtRunMode runMode); +private: + aclrtContext context_; + aclrtRunMode runMode_; + + int instanceId_; + string instanceName_; + + bool baseConfiged_; + bool isExit_; +}; + +struct AtlasThreadParam { + AtlasThread* threadInst = nullptr; + string threadInstName = ""; + aclrtContext context = nullptr; + aclrtRunMode runMode = ACL_HOST; + + int threadInstId = INVALID_INSTANCE_ID; +}; + + diff --git a/cplusplus/common/atlasutil/include/atlas_thread_mgr.h b/cplusplus/common/atlasutil/include/atlas_thread_mgr.h new file mode 100644 index 0000000000000000000000000000000000000000..4864f3cd00183ad7739f8d0b68bf656eefe093e5 --- /dev/null +++ b/cplusplus/common/atlasutil/include/atlas_thread_mgr.h @@ -0,0 +1,48 @@ +#pragma once +#include +#include +#include +#include +#include "atlas_utils.h" +#include "thread_safe_queue.h" +#include "atlas_thread.h" + +enum AtlasThreadStatus { + THREAD_READY = 0, + THREAD_RUNNING = 1, + THREAD_EXITING = 2, + THREAD_EXITED = 3, + THREAD_ERROR = 4, +}; + +class AtlasThreadMgr{ +public: + // 构造函数 + AtlasThreadMgr(AtlasThread* userThreadInstance, + const std::string& threadName); + ~AtlasThreadMgr(); + // 线程函数 + static void 
ThreadEntry(void* data); + + AtlasThread* GetUserInstance() { return this->userInstance_; } + const std::string& GetThreadName(){ return name_; } + // 将 AtlasMessage 数据发送到队列中 + AtlasError PushMsgToQueue(shared_ptr& pMessage); + // 从队列中将 AtlasMessage 数据取出 + shared_ptr PopMsgFromQueue(){ return this->msgQueue_.Pop(); } + void CreateThread(); + void SetStatus(AtlasThreadStatus status) { status_ = status; } + AtlasThreadStatus GetStatus() { return status_; } + AtlasError WaitThreadInitEnd(); + +public: + std::string name_; + AtlasThread* userInstance_; + + bool isExit_; + AtlasThreadStatus status_; + ThreadSafeQueue> msgQueue_; +}; + + + diff --git a/cplusplus/common/atlasutil/include/atlas_type.h b/cplusplus/common/atlasutil/include/atlas_type.h new file mode 100644 index 0000000000000000000000000000000000000000..fac4485a74f2cf7c1b626cb205209be5d4954f45 --- /dev/null +++ b/cplusplus/common/atlasutil/include/atlas_type.h @@ -0,0 +1,97 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+ +* File utils.h +* Description: handle file operations +*/ +#pragma once + + +#include +#include + +#include "acl/acl.h" +#include "acl/ops/acl_dvpp.h" + +enum MemoryType { + MEMORY_NORMAL = 0, + MEMORY_HOST, + MEMORY_DEVICE, + MEMORY_DVPP, + MEMORY_INVALID_TYPE +}; + +enum CopyDirection { + TO_DEVICE = 0, + TO_HOST, + INVALID_COPY_DIRECT +}; + +enum CameraId { + CAMERA_ID_0 = 0, + CAMERA_ID_1, + CAMERA_ID_INVALID, +}; + +struct ImageData { + acldvppPixelFormat format; + uint32_t width = 0; + uint32_t height = 0; + uint32_t alignWidth = 0; + uint32_t alignHeight = 0; + uint32_t size = 0; + std::shared_ptr data = nullptr; +}; + +struct FrameData { + bool isFinished = false; + uint32_t frameId = 0; + uint32_t size = 0; + void* data = nullptr; +}; + +struct Resolution { + uint32_t width = 0; + uint32_t height = 0; +}; + +struct Rect { + uint32_t ltX = 0; + uint32_t ltY = 0; + uint32_t rbX = 0; + uint32_t rbY = 0; +}; + +struct BBox { + Rect rect; + uint32_t score = 0; + std::string text; +}; + +struct AtlasMessage { + int dest; + int msgId; + std::shared_ptr data = nullptr; +}; + +struct DataInfo { + void* data; + uint32_t size; +}; + +struct InferenceOutput { + std::shared_ptr data = nullptr; + uint32_t size; +}; \ No newline at end of file diff --git a/cplusplus/common/atlasutil/include/atlas_utils.h b/cplusplus/common/atlasutil/include/atlas_utils.h new file mode 100644 index 0000000000000000000000000000000000000000..e1401384434586c00f7907f307e55b4ca0da6d3a --- /dev/null +++ b/cplusplus/common/atlasutil/include/atlas_utils.h @@ -0,0 +1,342 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. 
+* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. + +* File utils.h +* Description: handle file operations +*/ +#pragma once + +#include +#include +#include +#include +#include + +#include "acl/acl.h" +#include "acl/ops/acl_dvpp.h" + +#include "atlas_error.h" +#include "atlas_type.h" + +/** + * @brief calculate RGB 24bits image size + * @param [in]: width: image width + * @param [in]: height: image height + * @return bytes size of image + */ +#define RGBU8_IMAGE_SIZE(width, height) ((width) * (height) * 3) + +/** + * @brief calculate RGB C3F32 image size + * @param [in]: width: image width + * @param [in]: height: image height + * @return bytes size of image + */ +#define RGBF32_IMAGE_SIZE(width, height) ((width) * (height) * 3 * sizeof(float)) + +/** + * @brief calculate YUVSP420 image size + * @param [in]: width: image width + * @param [in]: height: image height + * @return bytes size of image + */ +#define YUV420SP_SIZE(width, height) ((width) * (height) * 3 / 2) + +/** + * @brief calculate YUVSP420 nv12 load to opencv mat height paramter + * @param [in]: height: yuv image height + * @return bytes size of image + */ +#define YUV420SP_CV_MAT_HEIGHT(height) ((height) * 3 / 2) + +/** + * @brief generate shared pointer of dvpp memory + * @param [in]: buf: memory pointer, malloc by acldvppMalloc + * @return shared pointer of input buffer + */ +#define SHARED_PRT_DVPP_BUF(buf) (shared_ptr((uint8_t *)(buf), [](uint8_t* p) { acldvppFree(p); })) + +/** + * @brief generate shared pointer of device memory + * @param [in]: buf: memory pointer, malloc by acldvppMalloc + * @return shared pointer of input buffer + */ 
+#define SHARED_PRT_DEV_BUF(buf) (shared_ptr((uint8_t *)(buf), [](uint8_t* p) { aclrtFree(p); })) + +/** + * @brief generate shared pointer of memory + * @param [in]: buf memory pointer, malloc by new + * @return shared pointer of input buffer + */ +#define SHARED_PRT_U8_BUF(buf) (shared_ptr((uint8_t *)(buf), [](uint8_t* p) { delete[](p); })) + +/** + * @brief calculate aligned number + * @param [in]: num: the original number that to aligned + * @param [in]: align: the align factor + * @return the number after aligned + */ +#define ALIGN_UP(num, align) (((num) + (align) - 1) & ~((align) - 1)) + +/** + * @brief calculate number align with 2 + * @param [in]: num: the original number that to aligned + * @return the number after aligned + */ +#define ALIGN_UP2(num) ALIGN_UP(num, 2) + +/** + * @brief calculate number align with 16 + * @param [in]: num: the original number that to aligned + * @return the number after aligned + */ +#define ALIGN_UP16(num) ALIGN_UP(num, 16) + +/** + * @brief calculate number align with 128 + * @param [in]: num: the original number that to aligned + * @return the number after aligned + */ +#define ALIGN_UP128(num) ALIGN_UP(num, 128) + +/** + * @brief calculate elements num of array + * @param [in]: array: the array variable + * @return elements num of array + */ +#define SIZEOF_ARRAY(array) (sizeof(array)/sizeof(array[0])) + +/** + * @brief Write acl error level log to host log + * @param [in]: fmt: the input format string + * @return none + */ +#define ATLAS_LOG_ERROR(fmt, ...) \ + do{aclAppLog(ACL_ERROR, __FUNCTION__, __FILE__, __LINE__, fmt, ##__VA_ARGS__); \ + fprintf(stdout, "[ERROR] " fmt "\n", ##__VA_ARGS__);}while(0) + +/** + * @brief Write acl info level log to host log + * @param [in]: fmt: the input format string + * @return none + */ +#define ATLAS_LOG_INFO(fmt, ...) 
\ + do{aclAppLog(ACL_INFO, __FUNCTION__, __FILE__, __LINE__, fmt, ##__VA_ARGS__); \ + fprintf(stdout, "[INFO] " fmt "\n", ##__VA_ARGS__);}while(0) + +/** + * @brief Write acl debug level log to host log + * @param [in]: fmt: the input format string + * @return none + */ +#define ATLAS_LOG_DEBUG(fmt, ...) \ + do{aclAppLog(ACL_DEBUG, __FUNCTION__, __FILE__, __LINE__, fmt, ##__VA_ARGS__); \ + fprintf(stdout, "[INFO] " fmt "\n", ##__VA_ARGS__);}while(0) + +/** + * @brief Recognize the string is a accessable directory or not + * @param [in]: path: the input string + * @return bool true: is directory; false: not directory + */ +bool IsDirectory(const std::string &path); + +/** + * @brief Copy data to device + * @param [in]: data: The data to copy + * @param [in]: size: The data bytes size + * @param [in]: curRunMode: The run mode, get by aclrtGetRunMode, + * Atlas200DK is ACL_DEVICE, Atlas300 is ACL_HOST + * @param [in]: memType: The dest memory type:MEMORY_NORMAL(in Atlas200DK), + * MEMORY_DEVICE, MEMORY_DVPP + * @return void* The dest memory pointer + */ +void* CopyDataToDevice(const void* data, uint32_t size, + aclrtRunMode curRunMode, MemoryType memType); + +/** + * @brief Copy data to device buffer + * @param [in]: dest: The device buffer + * @param [in]: destSize: The device buffer size + * @param [in]: src: The data to copy + * @param [in]: srcSize: The data bytes size + * @param [in]: curRunMode: The run mode, get by aclrtGetRunMode, + * Atlas200DK is ACL_DEVICE, Atlas300 is ACL_HOST + * @return AtlasError ATLAS_OK: copy success + * others: copy failed + */ +AtlasError CopyDataToDeviceEx(void* dest, uint32_t destSize, + const void* src, uint32_t srcSize, + aclrtRunMode runMode); + +/** + * @brief Copy data to host + * @param [in]: data: The data to be copy + * @param [in]: size: The data bytes size + * @param [in]: curRunMode: The run mode, get by aclrtGetRunMode, + * Atlas200DK is ACL_DEVICE, Atlas300 is ACL_HOST + * @param [in]: memType: The dest memory 
type:MEMORY_NORMAL, MEMORY_HOST + * @return void* The dest memory pointer + */ +void* CopyDataToHost(const void* data, uint32_t size, + aclrtRunMode curRunMode, MemoryType memType); + +/** + * @brief Copy data to host buffer + * @param [in]: dest: The host buffer + * @param [in]: destSize: The host buffer size + * @param [in]: src: The data to copy + * @param [in]: srcSize: The data bytes size + * @param [in]: curRunMode: The run mode, get by aclrtGetRunMode, + * Atlas200DK is ACL_DEVICE, Atlas300 is ACL_HOST + * @return AtlasError ATLAS_OK: copy success + * others: copy failed + */ +AtlasError CopyDataToHostEx(void* dest, uint32_t destSize, + const void* src, uint32_t srcSize, + aclrtRunMode runMode); + +/** + * @brief Copy data to memory + * @param [in]: data: The data to be copy + * @param [in]: size: The data bytes size + * @param [in]: policy: the kind of sync, + * typedef enum aclrtMemcpyKind { + * ACL_MEMCPY_HOST_TO_HOST, // Host内的内存复制 + * ACL_MEMCPY_HOST_TO_DEVICE, // Host到Device的内存复制 + * ACL_MEMCPY_DEVICE_TO_HOST, // Device到Host的内存复制 + * ACL_MEMCPY_DEVICE_TO_DEVICE, // Device内的内存复制 + * } aclrtMemcpyKind; + * @param [in]: memType: The dest memory type + * @return void* The dest memory pointer + */ +void* CopyData(const void* data, uint32_t size, + aclrtMemcpyKind policy, MemoryType memType); + +/** + * @brief Read jpeg image file. Only support baseline, not support progressive + * @param [out]: image: image data read from file. 
+ * @param [in]: fileName: The data bytes size + * @return AtlasError ATLAS_OK: read success + * others: read failed + */ +AtlasError ReadJpeg(ImageData& image, const std::string& fileName); + +/** + * @brief Get all files from file list string + * @param [in]: pathList: files list string, seperate by ',', + * the element could be file path or directory + * @param [in]: fileVec: The data bytes size + * @return AtlasError ATLAS_OK: read success + * others: read failed + */ +void GetAllFiles(const std::string &pathList, + std::vector &fileVec); + +/** + * @brief Save data to binary file + * @param [in]: filename: binary file name with path + * @param [in]: data: binary data + * @param [in]: size: bytes size of data + * @return AtlasError ATLAS_OK: read success + * others: read failed + */ +void SaveBinFile(const std::string& filename, const void* data, uint32_t size); + +/** + * @brief Read binary file to buffer + * @param [in]: filename: binary file name with path + * @param [in]: data: buffer + * @param [in]: size: buffer size + * @return AtlasError ATLAS_OK: read success + * others: read failed + */ +AtlasError ReadBinFile(const std::string& filename, + void* data, uint32_t& size); + +/** + * @brief Copy image to memory that malloc by new + * @param [out]: destImage: The image after copy + * @param [in]: srcImage: The image to copy + * @param [in]: curRunMode: The run mode, get by aclrtGetRunMode, + * Atlas200DK is ACL_DEVICE, Atlas300 is ACL_HOST + * @return AtlasError ATLAS_OK: read success + * others: read failed + */ +AtlasError CopyImageToLocal(ImageData& destImage, + ImageData& srcImage, aclrtRunMode curRunMode); + +/** + * @brief Copy image to acl device + * @param [out]: destImage: The image after copy + * @param [in]: srcImage: The image to copy + * @param [in]: curRunMode: The run mode, get by aclrtGetRunMode, + * Atlas200DK is ACL_DEVICE, Atlas300 is ACL_HOST + * @param [in]: memType: memory type, dvpp is MEMORY_DVPP, + * device is MEMPRY_DEVICE + * 
@return AtlasError ATLAS_OK: read success + * others: read failed + */ +AtlasError CopyImageToDevice(ImageData& destImage, ImageData& srcImage, + aclrtRunMode curRunMode, MemoryType memType); + +/** + * @brief Match ip address string as <1-255>.<0-255>.<0-255>.<0-255>: + * @param [in]: addrStr: Ip address string + * @return bool true: The input string match success + * false: is not match + */ +bool IsIpAddrWithPort(const std::string& addrStr); + +/** + * @brief Split ip address string <1-255>.<0-255>.<0-255>.<0-255>: to + * ip and port + * @param [out]: ip: Ip address <1-255>.<0-255>.<0-255>.<0-255> + * @param [out]: port: port string + * @param [in]: addr: Ip address string + * @return None + */ +void ParseIpAddr(std::string& ip, std::string& port, const std::string& addr); + +/** + * @brief Judge input string is mp4 file path + * @param [in]: path: file path + * @return bool true: input string is mp4 file path + * false: is not mp4 file path + */ +bool IsVideoFile(const std::string& path); + +/** + * @brief Judge input string is rtsp addr link rtsp:// + * @param [in]: str: input string + * @return bool true: input string is rtsp address + * false: is not rtsp address + */ +bool IsRtspAddr(const std::string &str); + +/** + * @brief Judge input string is digit string + * @param [in]: str: input string + * @return bool true: input string is digit string + * false: is not rtsp address + */ +bool IsDigitStr(const std::string& str); + +/** + * @brief Test file path is exist or not + * @param [in]: path: file path + * @return bool true: file path is exist + * false: is not exist + */ +bool IsPathExist(const std::string &path); diff --git a/cplusplus/common/atlasutil/include/atlas_videocap_base.h b/cplusplus/common/atlasutil/include/atlas_videocap_base.h new file mode 100644 index 0000000000000000000000000000000000000000..7f45b25d1bb4ba24749e69cb460643f1388bb365 --- /dev/null +++ b/cplusplus/common/atlasutil/include/atlas_videocap_base.h @@ -0,0 +1,53 @@ +/** +* 
Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. + +* File utils.h +* Description: handle file operations +*/ +#pragma once + +#include "atlas_error.h" +#include "atlas_type.h" + +#define RTSP_TRANS_UDP ((uint32_t)0) +#define RTSP_TRANS_TCP ((uint32_t)1) + +enum StreamProperty { + FRAME_WIDTH = 1, + FRAME_HEIGHT = 2, + VIDEO_FPS = 3, + OUTPUT_IMAGE_FORMAT = 4, + RTSP_TRANSPORT = 5 +}; + +class AtlasVideoCapBase { +public: + AtlasVideoCapBase(){} + virtual ~AtlasVideoCapBase(){}; + + virtual bool IsOpened() = 0; + + virtual AtlasError Set(StreamProperty key, uint32_t value) { return ATLAS_OK; } + virtual uint32_t Get(StreamProperty key) { return 0; } + + virtual AtlasError Read(ImageData& frame) = 0; + virtual AtlasError Close() = 0; + + virtual AtlasError Open() = 0; + +}; + + + diff --git a/cplusplus/common/atlasutil/include/atlas_videocapture.h b/cplusplus/common/atlasutil/include/atlas_videocapture.h new file mode 100644 index 0000000000000000000000000000000000000000..57cfb90efd4ace0a61d8d851d8f99d009d800618 --- /dev/null +++ b/cplusplus/common/atlasutil/include/atlas_videocapture.h @@ -0,0 +1,53 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. 
+* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. + +* File utils.h +* Description: handle file operations +*/ +#pragma once + +#include +#include "atlas_utils.h" +#include "atlas_videocap_base.h" + +class AtlasVideoCapture { +public: + AtlasVideoCapture(); + + ~AtlasVideoCapture(); + + AtlasVideoCapture(uint32_t cameraId, uint32_t width = 1280, + uint32_t height = 720, uint32_t fps = 20); + + AtlasVideoCapture(const std::string& videoPath, + aclrtContext context = nullptr); + + bool IsOpened(); + + AtlasError Set(StreamProperty key, uint32_t value); + uint32_t Get(StreamProperty key); + + AtlasError Read(ImageData& frame); + + AtlasError Close(); + +private: + AtlasError Open(); + +private: + AtlasVideoCapBase* cap_; +}; + + diff --git a/cplusplus/common/atlasutil/include/dvpp_process.h b/cplusplus/common/atlasutil/include/dvpp_process.h new file mode 100644 index 0000000000000000000000000000000000000000..0d0f850d3c46c27523b8eb699380b75006aa49a1 --- /dev/null +++ b/cplusplus/common/atlasutil/include/dvpp_process.h @@ -0,0 +1,52 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+* See the License for the specific language governing permissions and +* limitations under the License. + +* File dvpp_process.h +* Description: handle dvpp process +*/ +#pragma once +#include + +#include "acl/acl.h" +#include "acl/ops/acl_dvpp.h" +#include "atlas_utils.h" + +/** + * DvppProcess + */ +class DvppProcess { +public: + DvppProcess(); + ~DvppProcess(); + + AtlasError Init(); + AtlasError Resize(ImageData& dest,ImageData& src, + uint32_t width, uint32_t height); + AtlasError JpegD(ImageData& destYuv, ImageData& srcJpeg); + AtlasError JpegE(ImageData& destJpeg, ImageData& srcYuv); + AtlasError Crop(ImageData& dest, ImageData& src, + uint32_t ltHorz, uint32_t ltVert, + uint32_t rbHorz, uint32_t rbVert); + + void DestroyResource(); + +protected: + int isInitOk_; + aclrtStream stream_; + acldvppChannelDesc *dvppChannelDesc_; + + bool isReleased_; +}; + diff --git a/cplusplus/common/atlasutil/include/parse_config.h b/cplusplus/common/atlasutil/include/parse_config.h new file mode 100644 index 0000000000000000000000000000000000000000..0d2367cb4eb3f0b107956842fb3a211972ca367f --- /dev/null +++ b/cplusplus/common/atlasutil/include/parse_config.h @@ -0,0 +1,9 @@ +#pragma once + +#include +#include + +bool ReadConfig(std::map& config, + const char* configFile); +void PrintConfig(const std::map & m); + diff --git a/cplusplus/common/atlasutil/include/thread_safe_queue.h b/cplusplus/common/atlasutil/include/thread_safe_queue.h new file mode 100644 index 0000000000000000000000000000000000000000..425c49844440f9c447ed5178552e1a8a2e2cfc90 --- /dev/null +++ b/cplusplus/common/atlasutil/include/thread_safe_queue.h @@ -0,0 +1,138 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#ifndef THREAD_SAFE_QUEUE_H_ +#define THREAD_SAFE_QUEUE_H_ + +#include +#include + +template +class ThreadSafeQueue { +public: + + /** + * @brief ThreadSafeQueue constructor + * @param [in] capacity: the queue capacity + */ + ThreadSafeQueue(uint32_t capacity) { + // check the input value: capacity is valid + if (capacity >= kMinQueueCapacity && capacity <= kMaxQueueCapacity) { + queueCapacity = capacity; + } else { // the input value: capacity is invalid, set the default value + queueCapacity = kDefaultQueueCapacity; + } + } + + /** + * @brief ThreadSafeQueue constructor + */ + ThreadSafeQueue() { + queueCapacity = kDefaultQueueCapacity; + } + + /** + * @brief ThreadSafeQueue destructor + */ + ~ThreadSafeQueue() = default; + + /** + * @brief push data to queue + * @param [in] input_value: the value will push to the queue + * @return true: success to push data; false: fail to push data + */ + bool Push(T input_value) { + std::lock_guard lock(mutex_); + + // check current size is less than capacity + if (queue_.size() < queueCapacity) { + queue_.push(input_value); + return true; + } + + return false; + } + + /** + * @brief pop data from queue + * @return true: success to pop data; false: fail to pop data + */ + T Pop() { + std::lock_guard lock(mutex_); + if (queue_.empty()) { // check the queue is empty + return nullptr; + } + + T tmp_ptr = queue_.front(); + queue_.pop(); + return tmp_ptr; + } + + /** + * @brief check the queue is empty + * @return true: the queue is empty; false: the queue is not empty + */ + bool Empty() { + std::lock_guard lock(mutex_); + return queue_.empty(); + } + + /** + * @brief get the queue size + * @return the queue size + */ + uint32_t Size() { + std::lock_guard lock(mutex_); + return queue_.size(); + } + + void ExtendCapacity(uint32_t newSize) { + queueCapacity = newSize; + kMaxQueueCapacity = newSize >kMaxQueueCapacity ? 
newSize : kMaxQueueCapacity; + } + +private: + std::queue queue_; // the queue + + uint32_t queueCapacity; // queue capacity + + mutable std::mutex mutex_; // the mutex value + + const uint32_t kMinQueueCapacity = 1; // the minimum queue capacity + + const uint32_t kMaxQueueCapacity = 10000; // the maximum queue capacity + + const uint32_t kDefaultQueueCapacity = 10; // default queue capacity +}; + +#endif /* THREAD_SAFE_QUEUE_H_ */ diff --git a/cplusplus/common/atlasutil/include/venc_process.h b/cplusplus/common/atlasutil/include/venc_process.h new file mode 100644 index 0000000000000000000000000000000000000000..fa624f3553115b217ee26551c5ae821021f6cafa --- /dev/null +++ b/cplusplus/common/atlasutil/include/venc_process.h @@ -0,0 +1,96 @@ +/** +* @file venc_process.h +* +* Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. +* +* This program is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
+*/ +#pragma once +#include +#include +#include + +#include "acl/acl.h" +#include "acl/ops/acl_dvpp.h" + +#include "atlas_utils.h" +#include "thread_safe_queue.h" + +enum VencStatus { + STATUS_VENC_INIT = 0, + STATUS_VENC_WORK, + STATUS_VENC_FINISH, + STATUS_VENC_EXIT, + STATUS_VENC_ERROR, +}; + +struct VencConfig { + uint32_t maxWidth = 0; + uint32_t maxHeight = 0; + std::string outFile; + acldvppPixelFormat format = PIXEL_FORMAT_YUV_SEMIPLANAR_420; + acldvppStreamFormat enType = H264_MAIN_LEVEL; + aclrtContext context = nullptr; + aclrtRunMode runMode = ACL_HOST; +}; + +class DvppVenc { +public: + DvppVenc(VencConfig& vencConfig); + ~DvppVenc(); + + AtlasError Init(); + AtlasError Process(ImageData& image); + void Finish(); + +private: + AtlasError InitResource(); + AtlasError CreateVencChannel(); + AtlasError CreateInputPicDesc(ImageData& image); + AtlasError CreateFrameConfig(); + AtlasError SetFrameConfig(uint8_t eos, uint8_t forceIFrame); + AtlasError SaveVencFile(void* vencData, uint32_t size); + void DestroyResource(); + + static void Callback(acldvppPicDesc *input, + acldvppStreamDesc *output, void *userData); + static void* SubscribleThreadFunc(void *arg); +private: + VencConfig vencInfo_; + + pthread_t threadId_; + aclvencChannelDesc *vencChannelDesc_; + aclvencFrameConfig *vencFrameConfig_; + acldvppPicDesc *inputPicDesc_; + aclrtStream vencStream_; + + FILE *outFp_; + bool isFinished_; +}; + + +class VencProcess { + public: + VencProcess(VencConfig& vencConfig); + ~VencProcess(); + + AtlasError Init(); + AtlasError Process(ImageData& image); + + void SetStatus(VencStatus status) { status_ = status; } + VencStatus GetStatus() { return status_; } + +private: + static void AsyncVencThreadEntry(void* arg); + std::shared_ptr GetEncodeImage(); + +private: + VencConfig vencInfo_; + + VencStatus status_; + DvppVenc* vencProc_; + ThreadSafeQueue> frameImageQueue_; +}; + diff --git a/cplusplus/common/atlasutil/out/arm/libatlasutil.so 
b/cplusplus/common/atlasutil/out/arm/libatlasutil.so new file mode 100644 index 0000000000000000000000000000000000000000..4a6bf100e75ebde1eb0025ea0b2eb2e851692c64 Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/libatlasutil.so differ diff --git a/cplusplus/common/atlasutil/out/arm/obj/src/acl_device.o b/cplusplus/common/atlasutil/out/arm/obj/src/acl_device.o new file mode 100644 index 0000000000000000000000000000000000000000..5bf1f2b64b1959542aecdcb3bd9078b7e2bc95af Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/obj/src/acl_device.o differ diff --git a/cplusplus/common/atlasutil/out/arm/obj/src/atlas_app.o b/cplusplus/common/atlasutil/out/arm/obj/src/atlas_app.o new file mode 100644 index 0000000000000000000000000000000000000000..b7f1dea63638ed4f9889f1cbc4a79e8c09843039 Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/obj/src/atlas_app.o differ diff --git a/cplusplus/common/atlasutil/out/arm/obj/src/atlas_model.o b/cplusplus/common/atlasutil/out/arm/obj/src/atlas_model.o new file mode 100644 index 0000000000000000000000000000000000000000..0c44d1cab64ab85269edf8b7ace99bd898e8d5ce Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/obj/src/atlas_model.o differ diff --git a/cplusplus/common/atlasutil/out/arm/obj/src/atlas_thread.o b/cplusplus/common/atlasutil/out/arm/obj/src/atlas_thread.o new file mode 100644 index 0000000000000000000000000000000000000000..4d3d169b8c50a6e3e3b36a3b577a897646b5d2bb Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/obj/src/atlas_thread.o differ diff --git a/cplusplus/common/atlasutil/out/arm/obj/src/atlas_thread_mgr.o b/cplusplus/common/atlasutil/out/arm/obj/src/atlas_thread_mgr.o new file mode 100644 index 0000000000000000000000000000000000000000..58f41d5a5a9a7b6f5d6e234907d07557781fe1e7 Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/obj/src/atlas_thread_mgr.o differ diff --git a/cplusplus/common/atlasutil/out/arm/obj/src/atlas_utils.o 
b/cplusplus/common/atlasutil/out/arm/obj/src/atlas_utils.o new file mode 100644 index 0000000000000000000000000000000000000000..46443046da69f1936b2ad6ccd83de2b1ecc75a81 Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/obj/src/atlas_utils.o differ diff --git a/cplusplus/common/atlasutil/out/arm/obj/src/atlas_videocapture.o b/cplusplus/common/atlasutil/out/arm/obj/src/atlas_videocapture.o new file mode 100644 index 0000000000000000000000000000000000000000..a9256660bbcecda428df781a9da0f4fd513a2df6 Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/obj/src/atlas_videocapture.o differ diff --git a/cplusplus/common/atlasutil/out/arm/obj/src/camera.o b/cplusplus/common/atlasutil/out/arm/obj/src/camera.o new file mode 100644 index 0000000000000000000000000000000000000000..692a8db7c2ad29b2e5e8de688deafa038f0b2569 Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/obj/src/camera.o differ diff --git a/cplusplus/common/atlasutil/out/arm/obj/src/dvpp_cropandpaste.o b/cplusplus/common/atlasutil/out/arm/obj/src/dvpp_cropandpaste.o new file mode 100644 index 0000000000000000000000000000000000000000..d236050b21f9b8404e976ad63b3fbabc3b7ae1f5 Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/obj/src/dvpp_cropandpaste.o differ diff --git a/cplusplus/common/atlasutil/out/arm/obj/src/dvpp_jpegd.o b/cplusplus/common/atlasutil/out/arm/obj/src/dvpp_jpegd.o new file mode 100644 index 0000000000000000000000000000000000000000..00ba89833485d7e2482f044cf13353e0f24a5b81 Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/obj/src/dvpp_jpegd.o differ diff --git a/cplusplus/common/atlasutil/out/arm/obj/src/dvpp_jpege.o b/cplusplus/common/atlasutil/out/arm/obj/src/dvpp_jpege.o new file mode 100644 index 0000000000000000000000000000000000000000..aa3b5d46b9785eabdb6f8e6f392230179f2c6d20 Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/obj/src/dvpp_jpege.o differ diff --git 
a/cplusplus/common/atlasutil/out/arm/obj/src/dvpp_mem_mgr.o b/cplusplus/common/atlasutil/out/arm/obj/src/dvpp_mem_mgr.o new file mode 100644 index 0000000000000000000000000000000000000000..6c262efd770d23347de2876ad9a3682d2d686916 Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/obj/src/dvpp_mem_mgr.o differ diff --git a/cplusplus/common/atlasutil/out/arm/obj/src/dvpp_process.o b/cplusplus/common/atlasutil/out/arm/obj/src/dvpp_process.o new file mode 100644 index 0000000000000000000000000000000000000000..ce928bacdfc588c057db6915a01c5280aaef0029 Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/obj/src/dvpp_process.o differ diff --git a/cplusplus/common/atlasutil/out/arm/obj/src/dvpp_resize.o b/cplusplus/common/atlasutil/out/arm/obj/src/dvpp_resize.o new file mode 100644 index 0000000000000000000000000000000000000000..081eede9d6e8284aefe3ca532db4c5dbd5231008 Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/obj/src/dvpp_resize.o differ diff --git a/cplusplus/common/atlasutil/out/arm/obj/src/ffmpeg_decoder.o b/cplusplus/common/atlasutil/out/arm/obj/src/ffmpeg_decoder.o new file mode 100644 index 0000000000000000000000000000000000000000..38c1f5d1c143fd820457df5fb18778d0aab7d423 Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/obj/src/ffmpeg_decoder.o differ diff --git a/cplusplus/common/atlasutil/out/arm/obj/src/parse_config.o b/cplusplus/common/atlasutil/out/arm/obj/src/parse_config.o new file mode 100644 index 0000000000000000000000000000000000000000..552d05f355d88b114f30a2e4f1fcee8bd664f083 Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/obj/src/parse_config.o differ diff --git a/cplusplus/common/atlasutil/out/arm/obj/src/vdec_process.o b/cplusplus/common/atlasutil/out/arm/obj/src/vdec_process.o new file mode 100644 index 0000000000000000000000000000000000000000..4d3b64f013faa2c4cb62610016b878b5829dad77 Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/obj/src/vdec_process.o 
differ diff --git a/cplusplus/common/atlasutil/out/arm/obj/src/venc_process.o b/cplusplus/common/atlasutil/out/arm/obj/src/venc_process.o new file mode 100644 index 0000000000000000000000000000000000000000..2dab69305d2dfca27fc90e967ee67f6c348b7974 Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/obj/src/venc_process.o differ diff --git a/cplusplus/common/atlasutil/out/arm/obj/src/video_decode.o b/cplusplus/common/atlasutil/out/arm/obj/src/video_decode.o new file mode 100644 index 0000000000000000000000000000000000000000..9ea308df2c8fbe25765918c3721782cf4691e781 Binary files /dev/null and b/cplusplus/common/atlasutil/out/arm/obj/src/video_decode.o differ diff --git a/cplusplus/common/atlasutil/out/x86/obj/src/acl_device.o b/cplusplus/common/atlasutil/out/x86/obj/src/acl_device.o new file mode 100644 index 0000000000000000000000000000000000000000..a0d943d19137ba7f14dce9ddac3395a534931bd1 Binary files /dev/null and b/cplusplus/common/atlasutil/out/x86/obj/src/acl_device.o differ diff --git a/cplusplus/common/atlasutil/out/x86/obj/src/atlas_app.o b/cplusplus/common/atlasutil/out/x86/obj/src/atlas_app.o new file mode 100644 index 0000000000000000000000000000000000000000..444a534f8c7dc4c9acd545a4bc6bf646f84028c6 Binary files /dev/null and b/cplusplus/common/atlasutil/out/x86/obj/src/atlas_app.o differ diff --git a/cplusplus/common/atlasutil/out/x86/obj/src/atlas_model.o b/cplusplus/common/atlasutil/out/x86/obj/src/atlas_model.o new file mode 100644 index 0000000000000000000000000000000000000000..539ca71fee9765068d05fa5dc20c8c492b9997d0 Binary files /dev/null and b/cplusplus/common/atlasutil/out/x86/obj/src/atlas_model.o differ diff --git a/cplusplus/common/atlasutil/out/x86/obj/src/atlas_thread.o b/cplusplus/common/atlasutil/out/x86/obj/src/atlas_thread.o new file mode 100644 index 0000000000000000000000000000000000000000..ae1794dd51f1d34686874db4b9a67d971a533f2d Binary files /dev/null and b/cplusplus/common/atlasutil/out/x86/obj/src/atlas_thread.o differ 
diff --git a/cplusplus/common/atlasutil/out/x86/obj/src/atlas_thread_mgr.o b/cplusplus/common/atlasutil/out/x86/obj/src/atlas_thread_mgr.o new file mode 100644 index 0000000000000000000000000000000000000000..05995463c65e2d19a468ff205e3415b426b00f2b Binary files /dev/null and b/cplusplus/common/atlasutil/out/x86/obj/src/atlas_thread_mgr.o differ diff --git a/cplusplus/common/atlasutil/out/x86/obj/src/atlas_utils.o b/cplusplus/common/atlasutil/out/x86/obj/src/atlas_utils.o new file mode 100644 index 0000000000000000000000000000000000000000..381c70123713f4ca72427d9eb2bd757bb9459586 Binary files /dev/null and b/cplusplus/common/atlasutil/out/x86/obj/src/atlas_utils.o differ diff --git a/cplusplus/common/atlasutil/out/x86/obj/src/atlas_videocapture.o b/cplusplus/common/atlasutil/out/x86/obj/src/atlas_videocapture.o new file mode 100644 index 0000000000000000000000000000000000000000..14739e3d5f2782594c6659fa29f728cfcbfd627a Binary files /dev/null and b/cplusplus/common/atlasutil/out/x86/obj/src/atlas_videocapture.o differ diff --git a/cplusplus/common/atlasutil/out/x86/obj/src/dvpp_cropandpaste.o b/cplusplus/common/atlasutil/out/x86/obj/src/dvpp_cropandpaste.o new file mode 100644 index 0000000000000000000000000000000000000000..70c12b7ded3ec14c55cf32f8ec166665c913959e Binary files /dev/null and b/cplusplus/common/atlasutil/out/x86/obj/src/dvpp_cropandpaste.o differ diff --git a/cplusplus/common/atlasutil/out/x86/obj/src/dvpp_jpegd.o b/cplusplus/common/atlasutil/out/x86/obj/src/dvpp_jpegd.o new file mode 100644 index 0000000000000000000000000000000000000000..3f96e3c6d017da110d55c9e473ba748d5a0582f4 Binary files /dev/null and b/cplusplus/common/atlasutil/out/x86/obj/src/dvpp_jpegd.o differ diff --git a/cplusplus/common/atlasutil/out/x86/obj/src/dvpp_jpege.o b/cplusplus/common/atlasutil/out/x86/obj/src/dvpp_jpege.o new file mode 100644 index 0000000000000000000000000000000000000000..c7ba31f54d2396f9db26165c1d0392de9c7fa16e Binary files /dev/null and 
b/cplusplus/common/atlasutil/out/x86/obj/src/dvpp_jpege.o differ diff --git a/cplusplus/common/atlasutil/out/x86/obj/src/dvpp_mem_mgr.o b/cplusplus/common/atlasutil/out/x86/obj/src/dvpp_mem_mgr.o new file mode 100644 index 0000000000000000000000000000000000000000..48ddd3134905875f1d3439a2709e121f494646aa Binary files /dev/null and b/cplusplus/common/atlasutil/out/x86/obj/src/dvpp_mem_mgr.o differ diff --git a/cplusplus/common/atlasutil/out/x86/obj/src/dvpp_process.o b/cplusplus/common/atlasutil/out/x86/obj/src/dvpp_process.o new file mode 100644 index 0000000000000000000000000000000000000000..d1941c1c6fc44ba27fb28732f40ef254da11e98f Binary files /dev/null and b/cplusplus/common/atlasutil/out/x86/obj/src/dvpp_process.o differ diff --git a/cplusplus/common/atlasutil/out/x86/obj/src/dvpp_resize.o b/cplusplus/common/atlasutil/out/x86/obj/src/dvpp_resize.o new file mode 100644 index 0000000000000000000000000000000000000000..e5e4c71e1c4768c5747474eae8b0ccf2dd8e9a91 Binary files /dev/null and b/cplusplus/common/atlasutil/out/x86/obj/src/dvpp_resize.o differ diff --git a/cplusplus/common/atlasutil/out/x86/obj/src/ffmpeg_decoder.o b/cplusplus/common/atlasutil/out/x86/obj/src/ffmpeg_decoder.o new file mode 100644 index 0000000000000000000000000000000000000000..0b55c9d9bd4c591a8983d06a4be80aedb7cf94ba Binary files /dev/null and b/cplusplus/common/atlasutil/out/x86/obj/src/ffmpeg_decoder.o differ diff --git a/cplusplus/common/atlasutil/out/x86/obj/src/parse_config.o b/cplusplus/common/atlasutil/out/x86/obj/src/parse_config.o new file mode 100644 index 0000000000000000000000000000000000000000..045f2c6bed37d2ccd51491db020032ba5845d841 Binary files /dev/null and b/cplusplus/common/atlasutil/out/x86/obj/src/parse_config.o differ diff --git a/cplusplus/common/atlasutil/out/x86/obj/src/vdec_process.o b/cplusplus/common/atlasutil/out/x86/obj/src/vdec_process.o new file mode 100644 index 0000000000000000000000000000000000000000..54b1c8fb0bfcd2d4f3a6818ac1e93df12c86ff1a Binary files 
/dev/null and b/cplusplus/common/atlasutil/out/x86/obj/src/vdec_process.o differ diff --git a/cplusplus/common/atlasutil/out/x86/obj/src/venc_process.o b/cplusplus/common/atlasutil/out/x86/obj/src/venc_process.o new file mode 100644 index 0000000000000000000000000000000000000000..6fd9cf6298fbe283f9fda9346a165f48ac0778f8 Binary files /dev/null and b/cplusplus/common/atlasutil/out/x86/obj/src/venc_process.o differ diff --git a/cplusplus/common/atlasutil/out/x86/obj/src/video_decode.o b/cplusplus/common/atlasutil/out/x86/obj/src/video_decode.o new file mode 100644 index 0000000000000000000000000000000000000000..e18782bd2ea18935b66e9ca0f7d047f55991e682 Binary files /dev/null and b/cplusplus/common/atlasutil/out/x86/obj/src/video_decode.o differ diff --git a/cplusplus/common/atlasutil/src/.ipynb_checkpoints/Untitled-checkpoint.ipynb b/cplusplus/common/atlasutil/src/.ipynb_checkpoints/Untitled-checkpoint.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..363fcab7ed6e9634e198cf5555ceb88932c9a245 --- /dev/null +++ b/cplusplus/common/atlasutil/src/.ipynb_checkpoints/Untitled-checkpoint.ipynb @@ -0,0 +1,6 @@ +{ + "cells": [], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/cplusplus/common/atlasutil/src/Untitled.ipynb b/cplusplus/common/atlasutil/src/Untitled.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..363fcab7ed6e9634e198cf5555ceb88932c9a245 --- /dev/null +++ b/cplusplus/common/atlasutil/src/Untitled.ipynb @@ -0,0 +1,6 @@ +{ + "cells": [], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/cplusplus/common/atlasutil/src/acl_device.cpp b/cplusplus/common/atlasutil/src/acl_device.cpp new file mode 100644 index 0000000000000000000000000000000000000000..cb2b7658f6627e4e317a45c17845ef2be240f8c6 --- /dev/null +++ b/cplusplus/common/atlasutil/src/acl_device.cpp @@ -0,0 +1,113 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 
2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. + +* File utils.cpp +* Description: handle file operations +*/ +#include "atlas_utils.h" +#include "acl_device.h" + +using namespace std; + +AclDevice::AclDevice(): +deviceId_(0), +aclConfig_(""), +runMode_(ACL_HOST), +context_(nullptr), +useDefaultCtx_(true), +isReleased_(false) { +} + +AclDevice::AclDevice(int32_t devId, + const string& aclConfigPath, + bool useDefaultCtx): +deviceId_(devId), +aclConfig_(aclConfigPath), +runMode_(ACL_HOST), +context_(nullptr), +useDefaultCtx_(useDefaultCtx), +isReleased_(false) { +} + +AclDevice::~AclDevice() { + Release(); +} + +AtlasError AclDevice::Init() { + // ACL init + aclError ret = aclInit(aclConfig_.c_str()); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Acl init failed"); + return ATLAS_ERROR; + } + ATLAS_LOG_INFO("Acl init ok"); + + // open device + ret = aclrtSetDevice(deviceId_); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Acl open device %d failed", deviceId_); + return ATLAS_ERROR; + } + ATLAS_LOG_INFO("Open device %d ok", deviceId_); + + if (useDefaultCtx_) { + ret = aclrtGetCurrentContext(&context_); + if ((ret != ACL_ERROR_NONE) || (context_ == nullptr)) { + ATLAS_LOG_ERROR("Get current acl context error:%d", ret); + return ATLAS_ERROR_GET_ACL_CONTEXT; + } + } else { + ret = aclrtCreateContext(&context_, deviceId_); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Create acl context failed, error:%d", ret); + return ATLAS_ERROR_CREATE_ACL_CONTEXT; + } + } + + ret = aclrtGetRunMode(&runMode_); + if (ret != 
ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("acl get run mode failed"); + return ATLAS_ERROR; + } + + return ATLAS_OK; +} + +void AclDevice::Release() { + if (isReleased_) return; + + aclError ret; + if ((useDefaultCtx_ == false) && (context_ != nullptr)) { + ret = aclrtDestroyContext(context_); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("destroy context failed"); + } + context_ = nullptr; + } + + ret = aclrtResetDevice(deviceId_); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("reset device failed"); + } + ATLAS_LOG_INFO("Reset device %d ok", deviceId_); + + ret = aclFinalize(); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("finalize acl failed"); + } + ATLAS_LOG_INFO("Finalize acl ok"); + + isReleased_ = true; +} diff --git a/cplusplus/common/atlasutil/src/atlas_app.cpp b/cplusplus/common/atlasutil/src/atlas_app.cpp new file mode 100644 index 0000000000000000000000000000000000000000..db120e7ba4cd78896172fbeebd6a670f773e761a --- /dev/null +++ b/cplusplus/common/atlasutil/src/atlas_app.cpp @@ -0,0 +1,245 @@ +/** +* @file sample_process.cpp +* +* Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. +* +* This program is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
+*/ + +#include "acl/acl.h" +#include "atlas_app.h" +#include "atlas_thread_mgr.h" + +using namespace std; + +namespace { +const uint32_t kWaitInterval = 10000; +const uint32_t kThreadExitRetry = 3; +} + +AtlasApp::AtlasApp():isReleased_(false), waitEnd_(false){ + Init(); +} + +AtlasApp::~AtlasApp(){ + ReleaseThreads(); +} + +AtlasError AtlasApp::Init() { + AtlasThreadMgr* thMgr = new AtlasThreadMgr(nullptr, "main"); + threadList_.push_back(thMgr); + thMgr->SetStatus(THREAD_RUNNING); + return ATLAS_OK; +} + +int AtlasApp::CreateAtlasThread(AtlasThread* thInst, const string& instName, + aclrtContext context, aclrtRunMode runMode) +{ + int instId = CreateAtlasThreadMgr(thInst, instName, context, runMode); + if (instId == INVALID_INSTANCE_ID) { + ATLAS_LOG_ERROR("Add thread instance %s failed", instName.c_str()); + return INVALID_INSTANCE_ID; + } + + threadList_[instId]->CreateThread(); + AtlasError ret = threadList_[instId]->WaitThreadInitEnd(); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("Create thread failed, error %d", ret); + return INVALID_INSTANCE_ID; + } + + return instId; +} + +int AtlasApp::CreateAtlasThreadMgr(AtlasThread* thInst, const string& instName, + aclrtContext context, aclrtRunMode runMode){ + if (!CheckThreadNameUnique(instName)) { + ATLAS_LOG_ERROR("The thread instance name is not unique"); + return INVALID_INSTANCE_ID; + } + + int instId = threadList_.size(); + AtlasError ret = thInst->BaseConfig(instId, instName, context, runMode); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("Create thread instance failed for error %d", ret); + return INVALID_INSTANCE_ID; + } + + AtlasThreadMgr* thMgr = new AtlasThreadMgr(thInst, instName); + threadList_.push_back(thMgr); + + return instId; +} + +bool AtlasApp::CheckThreadNameUnique(const string& threadName) { + if (threadName.size() == 0) return true; + + for (size_t i = 0; i < threadList_.size(); i++) { + if (threadName == threadList_[i]->GetThreadName()) { + return false; + } + } + + return true; +} + +int 
AtlasApp::Start(vector& threadParamTbl) { + for (size_t i = 0; i < threadParamTbl.size(); i++) { + int instId = CreateAtlasThreadMgr(threadParamTbl[i].threadInst, + threadParamTbl[i].threadInstName, + threadParamTbl[i].context, + threadParamTbl[i].runMode); + if (instId == INVALID_INSTANCE_ID) { + ATLAS_LOG_ERROR("Create thread instance failed"); + return ATLAS_ERROR; + } + threadParamTbl[i].threadInstId = instId; + } + //Note:The instance id must generate first, then create thread, + //for the user thread get other thread instance id in Init function + for (size_t i = 0; i < threadParamTbl.size(); i++) { + threadList_[threadParamTbl[i].threadInstId]->CreateThread(); + } + + for (size_t i = 0; i < threadParamTbl.size(); i++) { + int instId = threadParamTbl[i].threadInstId; + AtlasError ret = threadList_[instId]->WaitThreadInitEnd(); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("Create thread %s failed, error %d", + threadParamTbl[i].threadInstName.c_str(), ret); + return ret; + } + } + + return ATLAS_OK; +} + +int AtlasApp::GetAtlasThreadIdByName(const string& threadName) { + if (threadName.empty()) { + ATLAS_LOG_ERROR("search name is empty"); + return INVALID_INSTANCE_ID; + } + + for (uint32_t i = 0; i < threadList_.size(); i++) { + if (threadList_[i]->GetThreadName() == threadName) { + return i; + } + } + + return INVALID_INSTANCE_ID; +} + +AtlasError AtlasApp::SendMessage(int dest, int msgId, shared_ptr data) { + if ((uint32_t)dest > threadList_.size()) { + ATLAS_LOG_ERROR("Send message to %d failed for thread not exist", dest); + return ATLAS_ERROR_DEST_INVALID; + } + + shared_ptr pMessage = make_shared(); + pMessage->dest = dest; + pMessage->msgId = msgId; + pMessage->data = data; + + return threadList_[dest]->PushMsgToQueue(pMessage); +} + +void AtlasApp::Wait() { + while (true) { + usleep(kWaitInterval); + if (waitEnd_) break; + } + threadList_[kMainThreadId]->SetStatus(THREAD_EXITED); +} + +bool AtlasApp::CheckThreadAbnormal() { + for (size_t i = 0; i < 
threadList_.size(); i++) { + if (threadList_[i]->GetStatus() == THREAD_ERROR) { + return true; + } + } + + return false; +} + +void AtlasApp::Wait(AtlasMsgProcess msgProcess, void* param) { + AtlasThreadMgr* mainMgr = threadList_[0]; + + if (mainMgr == nullptr) { + ATLAS_LOG_ERROR("Atlas app wait exit for message process function is nullptr"); + return; + } + + while (true) { + if (waitEnd_) break; + + shared_ptr msg = mainMgr->PopMsgFromQueue(); + if (msg == nullptr) { + usleep(kWaitInterval); + continue; + } + int ret = msgProcess(msg->msgId, msg->data, param); + if (ret) { + ATLAS_LOG_ERROR("Atlas app exit for message %d process error:%d", msg->msgId, ret); + break; + } + } + threadList_[kMainThreadId]->SetStatus(THREAD_EXITED); +} + +void AtlasApp::Exit() { + ReleaseThreads(); +} + +void AtlasApp::ReleaseThreads() { + if (isReleased_) return; + threadList_[kMainThreadId]->SetStatus(THREAD_EXITED); + + for (uint32_t i = 1; i < threadList_.size(); i++) { + if ((threadList_[i] != nullptr) && + (threadList_[i]->GetStatus() == THREAD_RUNNING)) + threadList_[i]->SetStatus(THREAD_EXITING); + } + + int retry = kThreadExitRetry; + while(retry >= 0) { + bool exitFinish = true; + for (uint32_t i = 0; i < threadList_.size(); i++) { + if (threadList_[i] == nullptr) + continue; + if (threadList_[i]->GetStatus() > THREAD_EXITING) { + delete threadList_[i]; + threadList_[i] = nullptr; + ATLAS_LOG_INFO("Atlas thread %d released", i); + } else { + exitFinish = false; + } + } + + if (exitFinish) + break; + + sleep(1); + retry--; + } + isReleased_ = true; +} + +AtlasApp& CreateAtlasAppInstance() { + return AtlasApp::GetInstance(); +} + +AtlasApp& GetAtlasAppInstance() { + return AtlasApp::GetInstance(); +} + +AtlasError SendMessage(int dest, int msgId, shared_ptr data) { + AtlasApp& app = AtlasApp::GetInstance(); + return app.SendMessage(dest, msgId, data); +} + +int GetAtlasThreadIdByName(const string& threadName) { + AtlasApp& app = AtlasApp::GetInstance(); + return 
app.GetAtlasThreadIdByName(threadName); +} diff --git a/cplusplus/common/atlasutil/src/atlas_model.cpp b/cplusplus/common/atlasutil/src/atlas_model.cpp new file mode 100644 index 0000000000000000000000000000000000000000..9656dce6934694ef471531e5440d4b8305fba469 --- /dev/null +++ b/cplusplus/common/atlasutil/src/atlas_model.cpp @@ -0,0 +1,374 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. + +* File model_process.cpp +* Description: handle model process +*/ +#include "atlas_model.h" +#include +#include "atlas_utils.h" +using namespace std; + +AtlasModel::AtlasModel() +:modelPath_(""), loadFlag_(false), modelId_(0), modelMemPtr_(nullptr), +modelMemSize_(0),modelWeightPtr_(nullptr),modelWeightSize_(0), +modelDesc_(nullptr), input_(nullptr), output_(nullptr), outputsNum_(0), +isReleased_(false) { +} + +AtlasModel::AtlasModel(const string& modelPath) +:modelPath_(modelPath), loadFlag_(false), modelId_(0), modelMemPtr_(nullptr), +modelMemSize_(0),modelWeightPtr_(nullptr),modelWeightSize_(0), +modelDesc_(nullptr), input_(nullptr), output_(nullptr), outputsNum_(0), +isReleased_(false) { +} + +AtlasModel::~AtlasModel() { + DestroyResource(); +} + +void AtlasModel::DestroyResource() { + if (isReleased_) { + return; + } + + Unload(); + DestroyDesc(); + DestroyInput(); + DestroyOutput(); + isReleased_ = true; +} + + +AtlasError AtlasModel::Init() { + aclError aclRet = aclrtGetRunMode(&runMode_); + if (aclRet != ACL_ERROR_NONE) { 
+ ATLAS_LOG_ERROR("acl get run mode failed"); + return ATLAS_ERROR_GET_RUM_MODE; + } + + AtlasError ret = LoadModelFromFile(modelPath_); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("Load model from file failed, error: %d", ret); + return ret; + } + + ret = CreateDesc(); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("execute CreateDesc failed"); + return ret; + } + + ret = CreateOutput(); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("execute CreateOutput failed"); + return ret; + } + + ATLAS_LOG_INFO("Init model %s success", modelPath_.c_str()); + + return ATLAS_OK; +} + +AtlasError AtlasModel::Init(const string& modelPath) { + modelPath_.assign(modelPath.c_str()); + return Init(); +} + +AtlasError AtlasModel::LoadModelFromFile(const string& modelPath) { + if (loadFlag_) { + ATLAS_LOG_ERROR("%s is loaded already", modelPath.c_str()); + return ATLAS_ERROR_LOAD_MODEL_REPEATED; + } + + aclError ret = aclmdlLoadFromFile(modelPath.c_str(), &modelId_); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Load model(%s) from file return %d", + modelPath.c_str(), ret); + return ATLAS_ERROR_LOAD_MODEL; + } + + loadFlag_ = true; + ATLAS_LOG_INFO("Load model %s success", modelPath.c_str()); + + return ATLAS_OK; +} + +AtlasError AtlasModel::CreateDesc() { + modelDesc_ = aclmdlCreateDesc(); + if (modelDesc_ == nullptr) { + ATLAS_LOG_ERROR("Create model(%s) description failed", + modelPath_.c_str()); + return ATLAS_ERROR_CREATE_MODEL_DESC; + } + + aclError ret = aclmdlGetDesc(modelDesc_, modelId_); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Get model(%s) description failed", + modelPath_.c_str()); + return ATLAS_ERROR_GET_MODEL_DESC; + } + + ATLAS_LOG_INFO("Create model description success"); + return ATLAS_OK; +} + +void AtlasModel::DestroyDesc() +{ + if (modelDesc_ != nullptr) { + (void)aclmdlDestroyDesc(modelDesc_); + modelDesc_ = nullptr; + } +} + +AtlasError AtlasModel::CreateInput(void *input, uint32_t size) { + vector inputData = {{input, size}}; + return 
CreateInput(inputData); +} + +AtlasError AtlasModel::CreateInput(void *input1, uint32_t input1size, + void* input2, uint32_t input2size) { + vector inputData = {{input1, input1size}, {input2, input2size}}; + return CreateInput(inputData); +} + +AtlasError AtlasModel::CreateInput(vector& inputData) { + uint32_t dataNum = inputData.size(); + + if (dataNum == 0) { + ATLAS_LOG_ERROR("Create input failed for no input data"); + return ATLAS_ERROR_INVALID_ARGS; + } + + input_ = aclmdlCreateDataset(); + if (input_ == nullptr) { + ATLAS_LOG_ERROR("Create input failed for create dataset failed"); + return ATLAS_ERROR_CREATE_DATASET; + } + + for (uint32_t i = 0; i < inputData.size(); i++) { + AtlasError atlRet = AddDatasetBuffer(input_, + inputData[i].data, + inputData[i].size); + if (atlRet != ATLAS_OK) { + ATLAS_LOG_ERROR("Create input failed for " + "add dataset buffer error %d", atlRet); + return ATLAS_ERROR_ADD_DATASET_BUFFER; + } + } + + return ATLAS_OK; +} + +AtlasError AtlasModel::CreateOutput() +{ + if (modelDesc_ == nullptr) { + ATLAS_LOG_ERROR("Create output failed for no model(%s) description", + modelPath_.c_str()); + return ATLAS_ERROR_NO_MODEL_DESC; + } + + output_ = aclmdlCreateDataset(); + if (output_ == nullptr) { + ATLAS_LOG_ERROR("Create output failed for create dataset error"); + return ATLAS_ERROR_CREATE_DATASET; + } + + outputsNum_ = aclmdlGetNumOutputs(modelDesc_); + for (size_t i = 0; i < outputsNum_; ++i) { + size_t bufSize = aclmdlGetOutputSizeByIndex(modelDesc_, i); + + void *outputBuffer = nullptr; + aclError ret = aclrtMalloc(&outputBuffer, bufSize, + ACL_MEM_MALLOC_NORMAL_ONLY); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Create output failed for malloc " + "device failed, size %d", (int)bufSize); + return ATLAS_ERROR_MALLOC_DEVICE; + } + + AtlasError atlRet = AddDatasetBuffer(output_, outputBuffer, bufSize); + if (atlRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Create output failed for " + "add dataset buffer error %d", atlRet); + 
aclrtFree(outputBuffer); + return ATLAS_ERROR_ADD_DATASET_BUFFER; + } + } + + ATLAS_LOG_INFO("Create model(%s) output success", modelPath_.c_str()); + return ATLAS_OK; +} + +AtlasError AtlasModel::AddDatasetBuffer(aclmdlDataset *dataset, + void* buffer, uint32_t bufferSize) { + aclDataBuffer* dataBuf = aclCreateDataBuffer(buffer, bufferSize); + if (dataBuf == nullptr) { + ATLAS_LOG_ERROR("Create data buffer error"); + return ATLAS_ERROR_CREATE_DATA_BUFFER; + } + + aclError ret = aclmdlAddDatasetBuffer(dataset, dataBuf); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Add dataset buffer error %d", ret); + aclDestroyDataBuffer(dataBuf); + return ATLAS_ERROR_ADD_DATASET_BUFFER; + } + + return ATLAS_OK; +} + +AtlasError AtlasModel::Execute(vector& inferOutputs, + void *data, uint32_t size) { + AtlasError ret = CreateInput(data, size); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("Create mode input dataset failed"); + return ret; + } + + ret = Execute(inferOutputs); + if (ret != ATLAS_OK) { + DestroyInput(); + ATLAS_LOG_ERROR("Execute model inference failed"); + return ret; + } + DestroyInput(); + + return ATLAS_OK; +} + +AtlasError AtlasModel::Execute(vector& inferOutputs) { + aclError ret = aclmdlExecute(modelId_, input_, output_); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Execute model(%s) error:%d", modelPath_.c_str(), ret); + return ATLAS_ERROR_EXECUTE_MODEL; + } + + for (uint32_t i = 0; i < outputsNum_; i++) { + InferenceOutput out; + AtlasError ret = GetOutputItem(out, i); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("Get the %dth interference output failed, " + "error: %d", i, ret); + return ret; + } + inferOutputs.push_back(out); + } + + return ATLAS_OK; +} + +AtlasError AtlasModel::GetOutputItem(InferenceOutput& out, + uint32_t idx) { + aclDataBuffer* dataBuffer = aclmdlGetDatasetBuffer(output_, idx); + if (dataBuffer == nullptr) { + ATLAS_LOG_ERROR("Get the %dth dataset buffer from model " + "inference output failed", idx); + return 
ATLAS_ERROR_GET_DATASET_BUFFER; + } + + void* dataBufferDev = aclGetDataBufferAddr(dataBuffer); + if (dataBufferDev == nullptr) { + ATLAS_LOG_ERROR("Get the %dth dataset buffer address " + "from model inference output failed", idx); + return ATLAS_ERROR_GET_DATA_BUFFER_ADDR; + } + + size_t bufferSize = aclGetDataBufferSize(dataBuffer); + if (bufferSize == 0) { + ATLAS_LOG_ERROR("The %dth dataset buffer size of " + "model inference output is 0", idx); + return ATLAS_ERROR_GET_DATA_BUFFER_SIZE; + } + + void* data = CopyDataToHost(dataBufferDev, bufferSize, + runMode_, MEMORY_NORMAL); + if (data == nullptr) { + ATLAS_LOG_ERROR("Copy inference output to host failed"); + return ATLAS_ERROR_COPY_DATA; + } + + out.data = SHARED_PRT_U8_BUF(data); + out.size = bufferSize; + + return ATLAS_OK; +} + +void AtlasModel::DestroyInput() +{ + if (input_ == nullptr) { + return; + } + + for (size_t i = 0; i < aclmdlGetDatasetNumBuffers(input_); ++i) { + aclDataBuffer* dataBuffer = aclmdlGetDatasetBuffer(input_, i); + aclDestroyDataBuffer(dataBuffer); + } + aclmdlDestroyDataset(input_); + input_ = nullptr; +} + +void AtlasModel::DestroyOutput() +{ + if (output_ == nullptr) { + return; + } + + for (size_t i = 0; i < aclmdlGetDatasetNumBuffers(output_); ++i) { + aclDataBuffer* dataBuffer = aclmdlGetDatasetBuffer(output_, i); + void* data = aclGetDataBufferAddr(dataBuffer); + (void)aclrtFree(data); + (void)aclDestroyDataBuffer(dataBuffer); + } + + (void)aclmdlDestroyDataset(output_); + output_ = nullptr; +} + +void AtlasModel::Unload() +{ + if (!loadFlag_) { + ATLAS_LOG_INFO("Model(%s) had not been loaded or unload already", + modelPath_.c_str()); + return; + } + + aclError ret = aclmdlUnload(modelId_); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Unload model(%s) error:%d", modelPath_.c_str(), ret); + } + + if (modelDesc_ != nullptr) { + (void)aclmdlDestroyDesc(modelDesc_); + modelDesc_ = nullptr; + } + + if (modelMemPtr_ != nullptr) { + aclrtFree(modelMemPtr_); + modelMemPtr_ = 
nullptr; + modelMemSize_ = 0; + } + + if (modelWeightPtr_ != nullptr) { + aclrtFree(modelWeightPtr_); + modelWeightPtr_ = nullptr; + modelWeightSize_ = 0; + } + + loadFlag_ = false; + ATLAS_LOG_INFO("Unload model %s success", modelPath_.c_str()); +} + + diff --git a/cplusplus/common/atlasutil/src/atlas_thread.cpp b/cplusplus/common/atlasutil/src/atlas_thread.cpp new file mode 100644 index 0000000000000000000000000000000000000000..bd58968323a4d44b8ca4743c2a48e64a4392bcb3 --- /dev/null +++ b/cplusplus/common/atlasutil/src/atlas_thread.cpp @@ -0,0 +1,43 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+ +* File utils.cpp +* Description: handle file operations +*/ +#include "atlas_thread.h" + +AtlasThread::AtlasThread() +:context_(nullptr), runMode_(ACL_HOST), +instanceId_(INVALID_INSTANCE_ID), instanceName_(""), baseConfiged_(false) { +} + +AtlasError AtlasThread::BaseConfig(int instanceId, + const string& threadName, + aclrtContext context, + aclrtRunMode runMode) { + if (baseConfiged_) { + return ATLAS_ERROR_INITED_ALREADY; + } + + instanceId_ = instanceId; + instanceName_.assign(threadName.c_str()); + context_ = context; + runMode_ = runMode; + + baseConfiged_ = true; + + return ATLAS_OK; +} + diff --git a/cplusplus/common/atlasutil/src/atlas_thread_mgr.cpp b/cplusplus/common/atlasutil/src/atlas_thread_mgr.cpp new file mode 100644 index 0000000000000000000000000000000000000000..51b8890f8099cf19145dff92f04e4146ee4a2744 --- /dev/null +++ b/cplusplus/common/atlasutil/src/atlas_thread_mgr.cpp @@ -0,0 +1,107 @@ +#include "atlas_thread_mgr.h" +#include "atlas_utils.h" + +namespace { + const uint32_t kMsgQueueSize = 256; + const uint32_t kWait10Milliseconds = 10000; + const uint32_t kWaitThreadStart = 1000; +} + + +AtlasThreadMgr::AtlasThreadMgr(AtlasThread* userThreadInstance, const string& threadName) +:name_(threadName), +userInstance_(userThreadInstance), +isExit_(false), +status_(THREAD_READY), +msgQueue_(kMsgQueueSize) { +} + +AtlasThreadMgr::~AtlasThreadMgr() { + userInstance_ = nullptr; + while(!msgQueue_.Empty()) { + msgQueue_.Pop(); + } +} + +void AtlasThreadMgr::CreateThread() { + thread engine(&AtlasThreadMgr::ThreadEntry, (void *)this); + engine.detach(); +} + +void AtlasThreadMgr::ThreadEntry(void* arg){ + AtlasThreadMgr* thMgr = (AtlasThreadMgr*)arg; + AtlasThread* userInstance = thMgr->GetUserInstance(); + if (userInstance == nullptr) { + ATLAS_LOG_ERROR("Atlas thread exit for user thread instance is null"); + return; + } + + string& instName = userInstance->SelfInstanceName(); + aclrtContext context = userInstance->GetContext(); + aclError aclRet = 
aclrtSetCurrentContext(context); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Thread %s set context failed, error: %d", + instName.c_str(), aclRet); + return; + } + + int ret = userInstance->Init(); + if (ret) { + ATLAS_LOG_ERROR("Thread %s init error %d, thread exit", + instName.c_str(), ret); + thMgr->SetStatus(THREAD_ERROR); + return; + } + + thMgr->SetStatus(THREAD_RUNNING); + while(THREAD_RUNNING == thMgr->GetStatus()) { + // 从队列中取数据 + shared_ptr msg = thMgr->PopMsgFromQueue(); + if(msg == nullptr) { + usleep(kWait10Milliseconds); + continue; + } + // 线程消息处理函数 + ret = userInstance->Process(msg->msgId, msg->data); + msg->data = nullptr; + if (ret) { + ATLAS_LOG_ERROR("Thread %s process function return " + "error %d, thread exit", instName.c_str(), ret); + thMgr->SetStatus(THREAD_ERROR); + return; + } + usleep(0); + } + thMgr->SetStatus(THREAD_EXITED); + + return; +} + +AtlasError AtlasThreadMgr::WaitThreadInitEnd() { + while(true) { + if (status_ == THREAD_RUNNING) { + break; + } else if (status_ > THREAD_RUNNING) { + string& instName = userInstance_->SelfInstanceName(); + ATLAS_LOG_ERROR("Thread instance %s status change to %d, " + "app start failed", instName.c_str(), status_); + return ATLAS_ERROR_START_THREAD; + } else { + usleep(kWaitThreadStart); + } + } + + return ATLAS_OK; +} + +AtlasError AtlasThreadMgr::PushMsgToQueue(shared_ptr& pMessage) { + if (status_ != THREAD_RUNNING) { + ATLAS_LOG_ERROR("Thread instance %s status(%d) is invalid, " + "can not reveive message", name_.c_str(), status_); + return ATLAS_ERROR_THREAD_ABNORMAL; + } + return msgQueue_.Push(pMessage)? 
ATLAS_OK : ATLAS_ERROR_ENQUEUE; +} + + + diff --git a/cplusplus/common/atlasutil/src/atlas_utils.cpp b/cplusplus/common/atlasutil/src/atlas_utils.cpp new file mode 100644 index 0000000000000000000000000000000000000000..8377078f090d9cbbdebf62e94279693b11f306a7 --- /dev/null +++ b/cplusplus/common/atlasutil/src/atlas_utils.cpp @@ -0,0 +1,431 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. + +* File utils.cpp +* Description: handle file operations +*/ +#include "atlas_utils.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "acl/acl.h" +#include "acl/ops/acl_dvpp.h" + +using namespace std; + +namespace { +const std::string kImagePathSeparator = ","; +const int kStatSuccess = 0; +const std::string kFileSperator = "/"; +const std::string kPathSeparator = "/"; +// output image prefix +const std::string kOutputFilePrefix = "out_"; + +const string kRegexIpAddr = + "^(1\\d{2}|2[0-4]\\d|25[0-5]|[1-9]\\d|[0-9])\\." + "(1\\d{2}|2[0-4]\\d|25[0-5]|[1-9]\\d|\\d)\\." + "(1\\d{2}|2[0-4]\\d|25[0-5]|[1-9]\\d|\\d)\\." 
+ "(1\\d{2}|2[0-4]\\d|25[0-5]|[1-9]\\d|\\d)" + ":([1-9]|[1-9]\\d|[1-9]\\d{2}|[1-9]\\d{3}|[1-5]\\d{4}|" + "6[0-4]\\d{3}|65[0-4]\\d{2}|655[0-2]\\d|6553[0-5])$"; + +// regex for verify video file name +const string kRegexVideoFile = "^.+\\.(mp4|h264|h265)$"; + +// regex for verify RTSP rtsp://ip:port/channelname +//const string kRegexRtsp = "^rtsp://[0-9a-zA-Z]*:[0-9a-zA-Z]*@.+"; +const string kRegexRtsp = "^rtsp://.*"; + + +} + +bool IsDigitStr(const string& str) { + for (uint32_t i = 0; i &pathVec) { + char *charPath = const_cast(path.c_str()); + const char *charSplit = kImagePathSeparator.c_str(); + char *imageFile = strtok(charPath, charSplit); + while (imageFile) { + pathVec.emplace_back(imageFile); + imageFile = strtok(nullptr, charSplit); + } +} + +void GetPathFiles(const string &path, vector &fileVec) { + struct dirent *direntPtr = nullptr; + DIR *dir = nullptr; + if (IsDirectory(path)) { + dir = opendir(path.c_str()); + while ((direntPtr = readdir(dir)) != nullptr) { + // skip . and .. + if (direntPtr->d_name[0] == '.') { + continue; + } + + // file path + string fullPath = path + kPathSeparator + direntPtr->d_name; + // directory need recursion + if (IsDirectory(fullPath)) { + GetPathFiles(fullPath, fileVec); + } else { + // put file + fileVec.emplace_back(fullPath); + } + } + } + else { + fileVec.emplace_back(path); + } +} + +void GetAllFiles(const string &pathList, vector &fileVec) { + // split file path + vector pathVec; + SplitPath(pathList, pathVec); + + for (string everyPath : pathVec) { + // check path exist or not + if (!IsPathExist(pathList)) { + ATLAS_LOG_ERROR("Failed to deal path=%s. 
Reason: not exist or can not access.", + everyPath.c_str()); + continue; + } + // get files in path and sub-path + GetPathFiles(everyPath, fileVec); + } +} + +void* MallocMemory(uint32_t dataSize, MemoryType memType) { + void* buffer = nullptr; + aclError aclRet = ACL_ERROR_NONE; + + switch(memType){ + case MEMORY_NORMAL: + buffer = new uint8_t[dataSize]; + break; + case MEMORY_HOST: + aclRet = aclrtMallocHost(&buffer, dataSize); + break; + case MEMORY_DEVICE: + aclRet = aclrtMalloc(&buffer, dataSize, ACL_MEM_MALLOC_HUGE_FIRST); + break; + case MEMORY_DVPP: + aclRet = acldvppMalloc(&buffer, dataSize); + break; + default: + ATLAS_LOG_ERROR("Invalid memory type %d", memType); + aclRet = ACL_ERROR_INVALID_PARAM; + break; + } + + if ((aclRet != ACL_ERROR_NONE) || (buffer == nullptr)) { + ATLAS_LOG_ERROR("Malloc memory failed, type: %d, errorno:%d", + memType, aclRet); + return nullptr; + } + + return buffer; +} + +void FreeMemory(void* mem, MemoryType memType) { + switch(memType){ + case MEMORY_NORMAL: + delete[]((uint8_t *)mem); + break; + case MEMORY_HOST: + aclrtFreeHost(mem); + break; + case MEMORY_DEVICE: + aclrtFree(mem); + break; + case MEMORY_DVPP: + acldvppFree(mem); + break; + default: + ATLAS_LOG_ERROR("Invalid memory type %d", memType); + break; + } +} + +aclrtMemcpyKind GetCopyPolicy(aclrtRunMode srcDev, + CopyDirection direct, MemoryType memType) { + aclrtMemcpyKind policy = ACL_MEMCPY_HOST_TO_HOST; + + if (direct == TO_DEVICE) { + if (srcDev == ACL_HOST) + policy = ACL_MEMCPY_HOST_TO_DEVICE; + else + policy = ACL_MEMCPY_DEVICE_TO_DEVICE; + } else {//TO_HOST + if (srcDev == ACL_HOST) + policy = ACL_MEMCPY_DEVICE_TO_HOST; + else + policy = ACL_MEMCPY_DEVICE_TO_DEVICE; + } + + return policy; +} + +void* CopyDataToDevice(const void* data, uint32_t size, + aclrtRunMode curRunMode, MemoryType memType) { + if ((data == nullptr) || (size == 0) || + ((curRunMode != ACL_HOST) && (curRunMode != ACL_DEVICE)) || + (memType >= MEMORY_INVALID_TYPE) || (memType == 
MEMORY_HOST)) { + ATLAS_LOG_ERROR("Copy data args invalid, data %p, " + "size %d, src dev %d, memory type %d", + data, size, curRunMode, memType); + return nullptr; + } + + aclrtMemcpyKind policy = GetCopyPolicy(curRunMode, TO_DEVICE, memType); + + return CopyData(data, size, policy, memType); +} + +AtlasError CopyDataToDeviceEx(void* dest, uint32_t destSize, + const void* src, uint32_t srcSize, + aclrtRunMode runMode) { + aclrtMemcpyKind policy = ACL_MEMCPY_HOST_TO_DEVICE; + if (runMode == ACL_DEVICE) { + policy = ACL_MEMCPY_DEVICE_TO_DEVICE; + } + + aclError aclRet = aclrtMemcpy(dest, destSize, src, srcSize, policy); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Copy data to device failed, aclRet is %d", aclRet); + return ATLAS_ERROR; + } + + return ATLAS_OK; +} + +void* CopyDataToHost(const void* data, uint32_t size, + aclrtRunMode curRunMode, MemoryType memType) { + if ((data == nullptr) || (size == 0) || + ((curRunMode != ACL_HOST) && (curRunMode != ACL_DEVICE)) || + ((memType != MEMORY_HOST) && (memType != MEMORY_NORMAL))) { + ATLAS_LOG_ERROR("Copy data args invalid, data %p, " + "size %d, src dev %d, memory type %d", + data, size, curRunMode, memType); + return nullptr; + } + + aclrtMemcpyKind policy = GetCopyPolicy(curRunMode, TO_HOST, memType); + + return CopyData(data, size, policy, memType); +} + +AtlasError CopyDataToHostEx(void* dest, uint32_t destSize, + const void* src, uint32_t srcSize, + aclrtRunMode runMode) { + aclrtMemcpyKind policy = ACL_MEMCPY_DEVICE_TO_HOST; + if (runMode == ACL_DEVICE) { + policy = ACL_MEMCPY_DEVICE_TO_DEVICE; + } + + aclError aclRet = aclrtMemcpy(dest, destSize, src, srcSize, policy); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Copy data to device failed, aclRet is %d", aclRet); + return ATLAS_ERROR; + } + + return ATLAS_OK; +} + +void* CopyData(const void* data, uint32_t size, + aclrtMemcpyKind policy, MemoryType memType) { + void* buffer = MallocMemory(size, memType); + if (buffer == nullptr) return 
nullptr; + + aclError aclRet = aclrtMemcpy(buffer, size, data, size, policy); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Copy data to device failed, aclRet is %d", aclRet); + FreeMemory(buffer, memType); + return nullptr; + } + + return buffer; +} + +AtlasError CopyImageToLocal(ImageData& destImage, + ImageData& srcImage, aclrtRunMode curRunMode) { + void* data = CopyDataToHost(srcImage.data.get(), srcImage.size, + curRunMode, MEMORY_NORMAL); + if (data == nullptr) { + return ATLAS_ERROR_COPY_DATA; + } + + destImage.format = srcImage.format; + destImage.width = srcImage.width; + destImage.height = srcImage.height; + destImage.size = srcImage.size; + destImage.alignWidth = srcImage.alignWidth; + destImage.alignHeight = srcImage.alignHeight; + destImage.data = SHARED_PRT_U8_BUF(data); + + return ATLAS_OK; +} + +AtlasError CopyImageToDevice(ImageData& destImage, ImageData& srcImage, + aclrtRunMode curRunMode, MemoryType memType) { + void* data = CopyDataToDevice(srcImage.data.get(), srcImage.size, + curRunMode, memType); + if (data == nullptr) { + return ATLAS_ERROR_COPY_DATA; + } + + destImage.format = srcImage.format; + destImage.width = srcImage.width; + destImage.height = srcImage.height; + destImage.size = srcImage.size; + destImage.alignWidth = srcImage.alignWidth; + destImage.alignHeight = srcImage.alignHeight; + + if(memType == MEMORY_DEVICE) + destImage.data = SHARED_PRT_DEV_BUF(data); + else + destImage.data = SHARED_PRT_DVPP_BUF(data); + + return ATLAS_OK; +} + +AtlasError ReadBinFile(const string& fileName, void* data, uint32_t& size) { + struct stat sBuf; + int fileStatus = stat(fileName.data(), &sBuf); + if (fileStatus == -1) { + ATLAS_LOG_ERROR("failed to get file"); + return ATLAS_ERROR_ACCESS_FILE; + } + if (S_ISREG(sBuf.st_mode) == 0) { + ATLAS_LOG_ERROR("%s is not a file, please enter a file", + fileName.c_str()); + return ATLAS_ERROR_INVALID_FILE; + } + std::ifstream binFile(fileName, std::ifstream::binary); + if (binFile.is_open() == 
false) { + ATLAS_LOG_ERROR("open file %s failed", fileName.c_str()); + return ATLAS_ERROR_OPEN_FILE; + } + + binFile.seekg(0, binFile.end); + uint32_t binFileBufferLen = binFile.tellg(); + if (binFileBufferLen == 0) { + ATLAS_LOG_ERROR("binfile is empty, filename is %s", fileName.c_str()); + binFile.close(); + return ATLAS_ERROR_INVALID_FILE; + } + + binFile.seekg(0, binFile.beg); + + uint8_t* binFileBufferData = new(std::nothrow) uint8_t[binFileBufferLen]; + if (binFileBufferData == nullptr) { + ATLAS_LOG_ERROR("malloc binFileBufferData failed"); + binFile.close(); + return ATLAS_ERROR_MALLOC; + } + binFile.read((char *)binFileBufferData, binFileBufferLen); + binFile.close(); + + data = binFileBufferData; + size = binFileBufferLen; + + return ATLAS_OK; +} + +AtlasError ReadJpeg(ImageData& image, std::string& fileName) { + uint32_t size = 0; + void* buf = nullptr; + + ReadBinFile(fileName, buf, size); + + int32_t ch = 0; + acldvppJpegGetImageInfo(buf, size, + &(image.width), &(image.height), &ch); + image.data.reset((uint8_t *)buf, [](uint8_t* p) { delete[](p); }); + image.size = size; + + return ATLAS_OK; +} + +void SaveBinFile(const string& filename, const void* data, uint32_t size) { + FILE *outFileFp = fopen(filename.c_str(), "wb+"); + if (outFileFp == nullptr) { + ATLAS_LOG_ERROR("Save file %s failed for open error", filename.c_str()); + return; + } + fwrite(data, 1, size, outFileFp); + + fflush(outFileFp); + fclose(outFileFp); +} diff --git a/cplusplus/common/atlasutil/src/atlas_videocapture.cpp b/cplusplus/common/atlasutil/src/atlas_videocapture.cpp new file mode 100644 index 0000000000000000000000000000000000000000..5d72c195b8e7bbfca7efe5b325301e76419c87ee --- /dev/null +++ b/cplusplus/common/atlasutil/src/atlas_videocapture.cpp @@ -0,0 +1,131 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ +#include +#include +#include +#include +#include +#include +#include +#include +#include "atlas_utils.h" +#include "atlas_videocapture.h" + +#ifdef ENABLE_BOARD_CAMARE +#include "camera.h" +#endif + +#include "video_decode.h" + + +using namespace std; + +AtlasVideoCapture::AtlasVideoCapture():cap_(nullptr) { +#ifdef ENABLE_BOARD_CAMARE + cap_ = new Camera(1280, 720, 20); + Open(); +#endif +} + +AtlasVideoCapture::AtlasVideoCapture(uint32_t cameraId, uint32_t width, + uint32_t height, uint32_t fps) +:cap_(nullptr) { +#ifdef ENABLE_BOARD_CAMARE + cap_ = new Camera(cameraId, width, height, fps); + Open(); +#endif +} + +AtlasVideoCapture::~AtlasVideoCapture() { + if (cap_ != nullptr) { + Close(); + delete cap_; + cap_ = nullptr; + } +} + +AtlasVideoCapture::AtlasVideoCapture(const string& videoPath, aclrtContext context){ + cap_ = new VideoDecode(videoPath, context); + Open(); +} + +bool AtlasVideoCapture::IsOpened() { + if (cap_ != nullptr) { + return cap_->IsOpened(); + } else { + return false; + } +} + +AtlasError AtlasVideoCapture::Set(StreamProperty key, uint32_t value) { + if (cap_ != nullptr) { + return cap_->Set(key, value); + } else { + return ATLAS_ERROR_UNSURPPORT_VIDEO_CAPTURE; + } +} + +uint32_t AtlasVideoCapture::Get(StreamProperty key) { + if (cap_ != nullptr) { + return cap_->Get(key); + } else { + return 0; + } +} + +AtlasError AtlasVideoCapture::Read(ImageData& frame) { + if (cap_ != nullptr) { + return cap_->Read(frame); + } else { + return ATLAS_ERROR_UNSURPPORT_VIDEO_CAPTURE; + } +} + +AtlasError AtlasVideoCapture::Close() { + if (cap_ != nullptr) { + return cap_->Close(); + } else { + return ATLAS_ERROR_UNSURPPORT_VIDEO_CAPTURE; + } +} + +AtlasError AtlasVideoCapture::Open() { + if (cap_ != nullptr) { + return cap_->Open(); + } else { + return ATLAS_ERROR_UNSURPPORT_VIDEO_CAPTURE; + } +} + + + diff --git a/cplusplus/common/atlasutil/src/camera.cpp 
b/cplusplus/common/atlasutil/src/camera.cpp new file mode 100644 index 0000000000000000000000000000000000000000..357f6d0b1c7f04360b9124e8bea0f8df57cf0243 --- /dev/null +++ b/cplusplus/common/atlasutil/src/camera.cpp @@ -0,0 +1,294 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ +#include +#include +#include +#include +#include +#include +#include +#include +#include "atlas_utils.h" + + +using namespace std; + +extern "C" { +#include "peripheral_api.h" +#include "camera.h" + + +CameraResolution gCameraResTbl[] = {{1920, 1080}, + {1280, 720}, + {704, 576}, + {704, 288}, + {352, 288}}; + +Camera::Camera(uint32_t width, uint32_t height, uint32_t fps) +: width_(width), height_(height), +size_(YUV420SP_SIZE(width_, height_)), fps_(fps) { + MediaLibInit(); + if (IsAccessible(CAMERA_ID_0)) { + id_ = CAMERA_ID_0; + } else if (IsAccessible(CAMERA_ID_1)) { + id_ = CAMERA_ID_1; + } else { + id_ = CAMERA_ID_INVALID; + ATLAS_LOG_ERROR("No camera accessable in device"); + } + + if (id_ != CAMERA_ID_INVALID) + ATLAS_LOG_INFO("No specified carmera id, use camera%d", id_); +} + +Camera::Camera(uint32_t id, uint32_t width, uint32_t height, uint32_t fps) +: id_(id), width_(width), height_(height), +size_(YUV420SP_SIZE(width_, height_)), fps_(fps){ + MediaLibInit(); +} + +bool Camera::IsAccessible(uint32_t id) { + CameraStatus status = QueryCameraStatus(id); + if (status == CAMERA_STATUS_OPEN) { + return true; + } + + if (status == CAMERA_STATUS_CLOSED) { + if (LIBMEDIA_STATUS_FAILED == OpenCamera(id)) { + ATLAS_LOG_ERROR("Open camera %d failed when test accessable", id); + return false; + } + if (LIBMEDIA_STATUS_FAILED == CloseCamera(id)) { + ATLAS_LOG_ERROR("Close camera %d failed when test accessable", id); + return false; + } + return true; + } + + return false; +} + +bool Camera::IsValidWidth(int width) { + for (uint32_t i = 0; i < SIZEOF_ARRAY(gCameraResTbl); i++) { + if (gCameraResTbl[i].width == width) + return true; + } + + return false; +} + +bool Camera::IsValidHeight(int height) { + for (uint32_t i = 0; i < SIZEOF_ARRAY(gCameraResTbl); i++) { + if (gCameraResTbl[i].height == height) + return true; + } + + return false; +} + +bool Camera::IsValidFps(int fps) { + 
return (fps > 0) && (fps <= 20); +} + +AtlasError Camera::SetProperty() { + int ret = SetCameraProperty(id_, CAMERA_PROP_FPS, &(fps_)); + if (ret == LIBMEDIA_STATUS_FAILED) { + ATLAS_LOG_ERROR("Set camera fps failed"); + return ATLAS_ERROR_SET_CAMERA; + } + + CameraResolution resolution; + resolution.width = width_; + resolution.height = height_; + + ret = SetCameraProperty(id_, CAMERA_PROP_RESOLUTION, &resolution); + if (ret == LIBMEDIA_STATUS_FAILED) { + ATLAS_LOG_ERROR("Set camera resolution failed"); + return ATLAS_ERROR_SET_CAMERA; + } + + CameraCapMode mode = CAMERA_CAP_ACTIVE; + ret = SetCameraProperty(id_, CAMERA_PROP_CAP_MODE, &mode); + if (ret == LIBMEDIA_STATUS_FAILED) { + ATLAS_LOG_ERROR("Set camera mode:%d failed", mode); + return ATLAS_ERROR_SET_CAMERA; + } + + return ATLAS_OK; +} + +AtlasError Camera::Open() { + if (id_ == CAMERA_ID_INVALID) { + ATLAS_LOG_ERROR("No camera is accessiable"); + return ATLAS_ERROR_CAMERA_NO_ACCESSABLE; + } + + CameraStatus status = QueryCameraStatus(id_); + if ((status == CAMERA_NOT_EXISTS) || (status == CAMERA_STATUS_UNKOWN)) { + ATLAS_LOG_ERROR("Camera %d status is error %d", id_, status); + return ATLAS_ERROR_CAMERA_NO_ACCESSABLE; + } + + if ((status != CAMERA_STATUS_OPEN) && + (LIBMEDIA_STATUS_FAILED == OpenCamera(id_))) { + ATLAS_LOG_ERROR("Open camera %d failed.", id_); + return ATLAS_ERROR_OPEN_CAMERA; + } + + AtlasError ret = SetProperty(); + if (ret != ATLAS_OK) { + Close(); + ATLAS_LOG_ERROR("Set camera%d property failed", id_); + return ret; + } + + ATLAS_LOG_INFO("Open camera %d success", id_); + + return ATLAS_OK; +} + +bool Camera::IsOpened() { + if (id_ == CAMERA_ID_INVALID) { + return false; + } + + return (CAMERA_STATUS_OPEN == QueryCameraStatus(id_)); +} + +AtlasError Camera::Read(ImageData& image) { + if (id_ == CAMERA_ID_INVALID) { + return ATLAS_ERROR_CAMERA_NO_ACCESSABLE; + } + + int size = (int)size_; + void* buffer = nullptr; + aclError aclRet = acldvppMalloc(&buffer, size); + if ((aclRet != 
ACL_ERROR_NONE) || (buffer == nullptr)) { + ATLAS_LOG_ERROR("Malloc dvpp memory failed, error:%d", aclRet); + return ATLAS_ERROR_MALLOC_DVPP; + } + + int ret = ReadFrameFromCamera(id_, buffer, &size); + if ((ret == LIBMEDIA_STATUS_FAILED) || (size != (int)size_)) { + ATLAS_LOG_ERROR("Get image from camera %d failed, size %d", id_, size); + return ATLAS_ERROR_READ_CAMERA_FRAME; + } + + image.format = PIXEL_FORMAT_YUV_SEMIPLANAR_420; + image.width = width_; + image.height = height_; + image.alignWidth = width_; + image.alignHeight = height_; + image.size = (uint32_t)size_; + image.data = SHARED_PRT_DVPP_BUF(buffer); + + return ATLAS_OK; +} + +uint32_t Camera::Get(StreamProperty key) { + uint32_t value = 0; + + switch(key){ + case FRAME_WIDTH: + value = width_; + break; + case FRAME_HEIGHT: + value = height_; + break; + case VIDEO_FPS: + value = fps_; + break; + default: + ATLAS_LOG_ERROR("Unsurpport property %d to get for camera", key); + break; + } + + return value; +} + +AtlasError Camera::Set(StreamProperty key, int value) { + AtlasError ret = ATLAS_OK; + + switch(key){ + case FRAME_WIDTH: + { + if (IsValidWidth(value)) { + width_ = value; + } else { + ret = ATLAS_ERROR_INVALID_PROPERTY_VALUE; + } + break; + } + case FRAME_HEIGHT: + { + if (IsValidHeight(value)) { + height_ = value; + } else { + ret = ATLAS_ERROR_INVALID_PROPERTY_VALUE; + } + break; + } + case VIDEO_FPS: + { + if (IsValidFps(value)) { + fps_ = value; + } else { + ret = ATLAS_ERROR_INVALID_PROPERTY_VALUE; + } + break; + } + default: + { + ret = ATLAS_ERROR_UNSURPPORT_PROPERTY; + ATLAS_LOG_ERROR("Unsurpport property %d to set for camera", key); + break; + } + } + + return ret; +} + +AtlasError Camera::Close() { + if (id_ == CAMERA_ID_INVALID) { + return ATLAS_ERROR_CAMERA_NO_ACCESSABLE; + } + + if (LIBMEDIA_STATUS_FAILED == CloseCamera(id_)) { + ATLAS_LOG_ERROR("Close camera %d failed", id_); + } + + return ATLAS_OK; +} + + +} diff --git a/cplusplus/common/atlasutil/src/camera.h 
b/cplusplus/common/atlasutil/src/camera.h new file mode 100644 index 0000000000000000000000000000000000000000..ba6b9859529808a41d6b5ce70edf15c263eb38ae --- /dev/null +++ b/cplusplus/common/atlasutil/src/camera.h @@ -0,0 +1,75 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ +#pragma once + +#include "atlas_utils.h" +#include "atlas_videocapture.h" + +#define CAMERA_NUM (2) +#define CAMERA(i) (g_CameraMgr.cap[i]) + + +class Camera : public AtlasVideoCapBase { +public: + Camera(uint32_t width, uint32_t height, uint32_t fps); + Camera(uint32_t id, uint32_t width, uint32_t height, uint32_t fps); + ~Camera(){}; + + bool IsOpened(); + + AtlasError Read(ImageData& frame); + AtlasError Close(); + + AtlasError Open(); + bool IsAccessible(uint32_t id); + AtlasError Set(StreamProperty key, int value); + uint32_t Get(StreamProperty key); + +private: + bool IsValidWidth(int width); + bool IsValidHeight(int height); + bool IsValidFps(int fps); + AtlasError SetProperty(); + CameraId GetOneAccessableSlot(); + +private: + uint32_t id_; + uint32_t width_; + uint32_t height_; + uint32_t size_; + uint32_t fps_; +}; + + + + diff --git a/cplusplus/common/atlasutil/src/dvpp_cropandpaste.cpp b/cplusplus/common/atlasutil/src/dvpp_cropandpaste.cpp new file mode 100644 index 0000000000000000000000000000000000000000..8e30553e8027b07328792d5f78d30d44af75a9ab --- /dev/null +++ b/cplusplus/common/atlasutil/src/dvpp_cropandpaste.cpp @@ -0,0 +1,218 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+ +* File dvpp_process.cpp +* Description: handle dvpp process +*/ + +#include +#include "acl/acl.h" +#include "atlas_utils.h" +#include "dvpp_cropandpaste.h" + +using namespace std; + +DvppCropAndPaste::DvppCropAndPaste(aclrtStream& stream, + acldvppChannelDesc *dvppChannelDesc, + uint32_t ltHorz, uint32_t ltVert, + uint32_t rbHorz, uint32_t rbVert) +: stream_(stream), dvppChannelDesc_(dvppChannelDesc), vpcInputDesc_(nullptr), +vpcOutputDesc_(nullptr), vpcOutBufferDev_(nullptr),vpcOutBufferSize_(0), +cropArea_(nullptr), pasteArea_(nullptr){ + // Change the left top coordinate to even numver + ltHorz_ = (ltHorz >> 1) << 1; + ltVert_ = (ltVert >> 1) << 1; + + // Change the left top coordinate to odd numver + rbHorz_ = ((rbHorz >> 1) << 1) - 1; + rbVert_ = ((rbVert >> 1) << 1) - 1; + + size_.width = rbHorz_ - ltHorz_; + size_.height = rbVert_ - ltVert_; +} + +DvppCropAndPaste::~DvppCropAndPaste() { + DestroyCropAndPasteResource(); +} + +AtlasError DvppCropAndPaste::InitCropAndPasteInputDesc(ImageData& inputImage) { + uint32_t alignWidth = ALIGN_UP16(inputImage.width); + uint32_t alignHeight = ALIGN_UP2(inputImage.height); + if (alignWidth == 0 || alignHeight == 0) { + ATLAS_LOG_ERROR("Invalid image parameters, width %d, height %d", + inputImage.width, inputImage.height); + return ATLAS_ERROR; + } + + uint32_t inputBufferSize = YUV420SP_SIZE(alignWidth, alignHeight); + vpcInputDesc_ = acldvppCreatePicDesc(); + if (vpcInputDesc_ == nullptr) { + ATLAS_LOG_ERROR("Dvpp crop create pic desc failed"); + return ATLAS_ERROR; + } + + acldvppSetPicDescData(vpcInputDesc_, inputImage.data.get()); + acldvppSetPicDescFormat(vpcInputDesc_, PIXEL_FORMAT_YUV_SEMIPLANAR_420); + acldvppSetPicDescWidth(vpcInputDesc_, inputImage.width); + acldvppSetPicDescHeight(vpcInputDesc_, inputImage.height); + acldvppSetPicDescWidthStride(vpcInputDesc_, alignWidth); + acldvppSetPicDescHeightStride(vpcInputDesc_, alignHeight); + acldvppSetPicDescSize(vpcInputDesc_, inputBufferSize); + + return 
ATLAS_OK; +} + +AtlasError DvppCropAndPaste::InitCropAndPasteOutputDesc() +{ + int cropOutWidth = size_.width; + int cropOutHeight = size_.height; + int cropOutWidthStride = ALIGN_UP16(cropOutWidth); + int cropOutHeightStride = ALIGN_UP2(cropOutHeight); + + if (cropOutWidthStride == 0 || cropOutHeightStride == 0) { + ATLAS_LOG_ERROR("Crop image align widht(%d) and height(%d) failed", + size_.width, size_.height); + return ATLAS_ERROR; + } + + vpcOutBufferSize_ = YUV420SP_SIZE(cropOutWidthStride, + cropOutHeightStride); + aclError aclRet = acldvppMalloc(&vpcOutBufferDev_, vpcOutBufferSize_); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Dvpp crop malloc output memory failed, crop " + "width %d, height %d size %d, error %d", + size_.width, size_.height, + vpcOutBufferSize_, aclRet); + return ATLAS_ERROR; + } + + vpcOutputDesc_ = acldvppCreatePicDesc(); + if (vpcOutputDesc_ == nullptr) { + ATLAS_LOG_ERROR("Dvpp crop create pic desc failed"); + return ATLAS_ERROR; + } + acldvppSetPicDescData(vpcOutputDesc_, vpcOutBufferDev_); + acldvppSetPicDescFormat(vpcOutputDesc_, PIXEL_FORMAT_YUV_SEMIPLANAR_420); + acldvppSetPicDescWidth(vpcOutputDesc_, cropOutWidth); + acldvppSetPicDescHeight(vpcOutputDesc_, cropOutHeight); + acldvppSetPicDescWidthStride(vpcOutputDesc_, cropOutWidthStride); + acldvppSetPicDescHeightStride(vpcOutputDesc_, cropOutHeightStride); + acldvppSetPicDescSize(vpcOutputDesc_, vpcOutBufferSize_); + + return ATLAS_OK; +} + +AtlasError DvppCropAndPaste::InitCropAndPasteResource(ImageData& inputImage) { + if (ATLAS_OK != InitCropAndPasteInputDesc(inputImage)) { + ATLAS_LOG_ERROR("Dvpp crop init input failed"); + return ATLAS_ERROR; + } + + if (ATLAS_OK != InitCropAndPasteOutputDesc()) { + ATLAS_LOG_ERROR("Dvpp crop init output failed"); + return ATLAS_ERROR; + } + + return ATLAS_OK; +} + +AtlasError DvppCropAndPaste::Process(ImageData& cropedImage, ImageData& srcImage) +{ + if (ATLAS_OK != InitCropAndPasteResource(srcImage)) { + ATLAS_LOG_ERROR("Dvpp 
cropandpaste failed for init error"); + return ATLAS_ERROR; + } + + // must even + uint32_t cropLeftOffset = ltHorz_; + // must even + uint32_t cropTopOffset = ltVert_; + // must odd + uint32_t cropRightOffset = rbHorz_; + // must odd + uint32_t cropBottomOffset = rbVert_; + + cropArea_ = acldvppCreateRoiConfig(cropLeftOffset, cropRightOffset, + cropTopOffset, cropBottomOffset); + if (cropArea_ == nullptr) { + ATLAS_LOG_ERROR("acldvppCreateRoiConfig cropArea_ failed"); + return ATLAS_ERROR; + } + + // must even + uint32_t pasteLeftOffset = 0; + // must even + uint32_t pasteTopOffset = 0; + // must odd + uint32_t pasteRightOffset = size_.width; + // must odd + uint32_t pasteBottomOffset = size_.height; + + pasteArea_ = acldvppCreateRoiConfig(pasteLeftOffset, pasteRightOffset, + pasteTopOffset, pasteBottomOffset); + if (pasteArea_ == nullptr) { + ATLAS_LOG_ERROR("acldvppCreateRoiConfig pasteArea_ failed"); + return ATLAS_ERROR; + } + + // crop and patse pic + //TODO: + aclError aclRet = acldvppVpcCropAndPasteAsync(dvppChannelDesc_, vpcInputDesc_, + vpcOutputDesc_, cropArea_, pasteArea_, stream_); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("acldvppVpcCropAndPasteAsync failed, aclRet = %d", aclRet); + return ATLAS_ERROR; + } + + aclRet = aclrtSynchronizeStream(stream_); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("crop and paste aclrtSynchronizeStream failed, aclRet = %d", aclRet); + return ATLAS_ERROR; + } + cropedImage.format = PIXEL_FORMAT_YUV_SEMIPLANAR_420; + cropedImage.width = ALIGN_UP16(size_.width); + cropedImage.height = ALIGN_UP2(size_.height); + cropedImage.alignWidth = cropedImage.width; + cropedImage.alignHeight = cropedImage.height; + cropedImage.size = vpcOutBufferSize_; + cropedImage.data = SHARED_PRT_DVPP_BUF(vpcOutBufferDev_); + + DestroyCropAndPasteResource(); + + return ATLAS_OK; +} + +void DvppCropAndPaste::DestroyCropAndPasteResource() +{ + if (cropArea_ != nullptr) { + (void)acldvppDestroyRoiConfig(cropArea_); + cropArea_ = 
nullptr; + } + + if (pasteArea_ != nullptr) { + (void)acldvppDestroyRoiConfig(pasteArea_); + pasteArea_ = nullptr; + } + + if (vpcInputDesc_ != nullptr) { + (void)acldvppDestroyPicDesc(vpcInputDesc_); + vpcInputDesc_ = nullptr; + } + + if (vpcOutputDesc_ != nullptr) { + (void)acldvppDestroyPicDesc(vpcOutputDesc_); + vpcOutputDesc_ = nullptr; + } +} diff --git a/cplusplus/common/atlasutil/src/dvpp_cropandpaste.h b/cplusplus/common/atlasutil/src/dvpp_cropandpaste.h new file mode 100644 index 0000000000000000000000000000000000000000..6fee164361a4f427efa4d656742470cd10ccb4fc --- /dev/null +++ b/cplusplus/common/atlasutil/src/dvpp_cropandpaste.h @@ -0,0 +1,86 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+ +* File dvpp_process.h +* Description: handle dvpp process +*/ +#pragma once +#include + +#include "acl/acl.h" +#include "acl/ops/acl_dvpp.h" + +class DvppCropAndPaste { + public: + /** + * @brief Constructor + * @param [in] stream: stream + */ + DvppCropAndPaste(aclrtStream &stream, acldvppChannelDesc *dvppChannelDesc, + uint32_t lt_horz, uint32_t lt_vert,uint32_t rb_horz, uint32_t rb_vert); + + /** + * @brief Destructor + */ + ~DvppCropAndPaste(); + + /** + * @brief dvpp global init + * @return AtlasError + */ + AtlasError InitResource(); + + /** + * @brief init dvpp output para + * @param [in] modelInputWidth: model input width + * @param [in] modelInputHeight: model input height + * @return AtlasError + */ + AtlasError InitOutputPara(int modelInputWidth, int modelInputHeight); + + /** + * @brief dvpp process + * @return AtlasError + */ + AtlasError Process(ImageData& resizedImage, ImageData& srcImage); + +private: + AtlasError InitCropAndPasteResource(ImageData& inputImage); + AtlasError InitCropAndPasteInputDesc(ImageData& inputImage); + AtlasError InitCropAndPasteOutputDesc(); + + void DestroyCropAndPasteResource(); + +private: + aclrtStream stream_; + acldvppChannelDesc *dvppChannelDesc_; + + acldvppPicDesc *vpcInputDesc_; + acldvppPicDesc *vpcOutputDesc_; + + void *vpcOutBufferDev_; + uint32_t vpcOutBufferSize_; + + acldvppRoiConfig *cropArea_; + acldvppRoiConfig *pasteArea_; + + Resolution size_; + + uint32_t ltHorz_; + uint32_t rbHorz_; + uint32_t ltVert_; + uint32_t rbVert_; +}; + diff --git a/cplusplus/common/atlasutil/src/dvpp_jpegd.cpp b/cplusplus/common/atlasutil/src/dvpp_jpegd.cpp new file mode 100644 index 0000000000000000000000000000000000000000..d12259181b4b6378e11643b71526461a458ad0df --- /dev/null +++ b/cplusplus/common/atlasutil/src/dvpp_jpegd.cpp @@ -0,0 +1,112 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with 
the License. +* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. + +* File dvpp_process.cpp +* Description: handle dvpp process +*/ + +#include +#include "acl/acl.h" +#include "dvpp_jpegd.h" + +using namespace std; + +DvppJpegD::DvppJpegD(aclrtStream& stream, acldvppChannelDesc *dvppChannelDesc) + : stream_(stream), dvppChannelDesc_(dvppChannelDesc), + decodeOutBufferDev_(nullptr), decodeOutputDesc_(nullptr) +{ +} + +DvppJpegD::~DvppJpegD() +{ + DestroyDecodeResource(); +} + + +AtlasError DvppJpegD::InitDecodeOutputDesc(ImageData& inputImage) +{ + uint32_t decodeOutWidthStride = ALIGN_UP128(inputImage.width); + uint32_t decodeOutHeightStride = ALIGN_UP16(inputImage.height); + if (decodeOutWidthStride == 0 || decodeOutHeightStride == 0) { + ATLAS_LOG_ERROR("Input image width %d or height %d invalid", + inputImage.width, inputImage.height); + return ATLAS_ERROR_INVALID_ARGS; + } + + uint32_t decodeOutBufferSize = + YUV420SP_SIZE(decodeOutWidthStride, decodeOutHeightStride) * 2; + + aclError aclRet = acldvppMalloc(&decodeOutBufferDev_, decodeOutBufferSize); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Malloc dvpp memory failed, error:%d", aclRet); + return ATLAS_ERROR_MALLOC_DVPP; + } + + decodeOutputDesc_ = acldvppCreatePicDesc(); + if (decodeOutputDesc_ == nullptr) { + ATLAS_LOG_ERROR("Create dvpp pic desc failed"); + return ATLAS_ERROR_CREATE_PIC_DESC; + } + + acldvppSetPicDescData(decodeOutputDesc_, decodeOutBufferDev_); + acldvppSetPicDescFormat(decodeOutputDesc_, PIXEL_FORMAT_YUV_SEMIPLANAR_420); + acldvppSetPicDescWidth(decodeOutputDesc_, inputImage.width); + 
acldvppSetPicDescHeight(decodeOutputDesc_, inputImage.height); + acldvppSetPicDescWidthStride(decodeOutputDesc_, decodeOutWidthStride); + acldvppSetPicDescHeightStride(decodeOutputDesc_, decodeOutHeightStride); + acldvppSetPicDescSize(decodeOutputDesc_, decodeOutBufferSize); + + return ATLAS_OK; +} + +AtlasError DvppJpegD::Process(ImageData& dest, ImageData& src) +{ + int ret = InitDecodeOutputDesc(src); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("InitDecodeOutputDesc failed"); + return ret; + } + + aclError aclRet = acldvppJpegDecodeAsync(dvppChannelDesc_, + reinterpret_cast(src.data.get()), + src.size, decodeOutputDesc_, stream_); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("acldvppJpegDecodeAsync failed, error: %d", aclRet); + return ATLAS_ERROR_JPEGD_ASYNC; + } + + aclRet = aclrtSynchronizeStream(stream_); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Sync stream failed, error: %d", aclRet); + return ATLAS_ERROR_SYNC_STREAM; + } + dest.format = PIXEL_FORMAT_YUV_SEMIPLANAR_420; + dest.width = ALIGN_UP128(src.width); + dest.height = ALIGN_UP16(src.height); + dest.alignWidth = dest.width; + dest.alignHeight = dest.height; + dest.size = YUV420SP_SIZE(dest.alignWidth, dest.alignHeight); + dest.data = SHARED_PRT_DVPP_BUF(decodeOutBufferDev_); + + return ATLAS_OK; +} + +void DvppJpegD::DestroyDecodeResource() +{ + if (decodeOutputDesc_ != nullptr) { + acldvppDestroyPicDesc(decodeOutputDesc_); + decodeOutputDesc_ = nullptr; + } +} diff --git a/cplusplus/common/atlasutil/src/dvpp_jpegd.h b/cplusplus/common/atlasutil/src/dvpp_jpegd.h new file mode 100644 index 0000000000000000000000000000000000000000..ef604195bc09f490a0e8b0dfd80e84af2cd10ce3 --- /dev/null +++ b/cplusplus/common/atlasutil/src/dvpp_jpegd.h @@ -0,0 +1,87 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. 
+* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. + +* File dvpp_process.h +* Description: handle dvpp process +*/ +#pragma once +#include + +#include "acl/acl.h" +#include "acl/ops/acl_dvpp.h" +#include "atlas_utils.h" + +class DvppJpegD { +public: + /** + * @brief Constructor + * @param [in] stream: stream + */ + DvppJpegD(aclrtStream &stream, acldvppChannelDesc *dvppChannelDesc); + + /** + * @brief Destructor + */ + ~DvppJpegD(); + + /** + * @brief dvpp global init + * @return result + */ + AtlasError InitResource(); + + /** + * @brief init dvpp output para + * @param [in] modelInputWidth: model input width + * @param [in] modelInputHeight: model input height + * @return result + */ + AtlasError InitOutputPara(int modelInputWidth, int modelInputHeight); + + /** + * @brief set jpegd input + * @param [in] inDevBuffer: device buffer of input pic + * @param [in] inDevBufferSize: device buffer size of input pic + * @param [in] inputWidth:width of pic + * @param [in] inputHeight:height of pic + */ + void SetInput4JpegD(uint8_t* inDevBuffer, int inDevBufferSize, + int inputWidth, int inputHeight); + AtlasError InitDecodeOutputDesc(ImageData& inputImage); + /** + * @brief gett dvpp output + * @param [in] outputBuffer: pointer which points to dvpp output buffer + * @param [out] outputSize: output size + */ + void GetOutput(void **outputBuffer, int &outputSize); + AtlasError Process(ImageData& dest, ImageData& src); + /** + * @brief release encode resource + */ + void DestroyEncodeResource(); + +private: + void DestroyDecodeResource(); + void DestroyResource(); + void DestroyOutputPara(); + +private: + aclrtStream 
stream_; + acldvppChannelDesc *dvppChannelDesc_; + + void* decodeOutBufferDev_; // decode output buffer + acldvppPicDesc *decodeOutputDesc_; //decode output desc +}; + diff --git a/cplusplus/common/atlasutil/src/dvpp_jpege.cpp b/cplusplus/common/atlasutil/src/dvpp_jpege.cpp new file mode 100644 index 0000000000000000000000000000000000000000..5b7786fa813599af3102e9290c43c48b5980c494 --- /dev/null +++ b/cplusplus/common/atlasutil/src/dvpp_jpege.cpp @@ -0,0 +1,133 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+ +* File dvpp_process.cpp +* Description: handle dvpp process +*/ + +#include +#include "acl/acl.h" +#include "dvpp_jpege.h" + +using namespace std; + +DvppJpegE::DvppJpegE(aclrtStream& stream, acldvppChannelDesc* dvppChannelDesc) + : stream_(stream), dvppChannelDesc_(dvppChannelDesc), jpegeConfig_(nullptr), encodeOutBufferSize_(0), + encodeOutBufferDev_(nullptr), encodeInputDesc_(nullptr){ +} + + +DvppJpegE::~DvppJpegE() { + +} + +AtlasError DvppJpegE::InitEncodeInputDesc(ImageData& inputImage) +{ + uint32_t alignWidth = ALIGN_UP16(inputImage.width); + uint32_t alignHeight = ALIGN_UP2(inputImage.height); + if (alignWidth == 0 || alignHeight == 0) { + ATLAS_LOG_ERROR("Input image width %d or height %d invalid", + inputImage.width, inputImage.height); + return ATLAS_ERROR_INVALID_ARGS; + } + uint32_t inputBufferSize = YUV420SP_SIZE(alignWidth, alignHeight); + + encodeInputDesc_ = acldvppCreatePicDesc(); + if (encodeInputDesc_ == nullptr) { + ATLAS_LOG_ERROR("Create dvpp pic desc failed"); + return ATLAS_ERROR_CREATE_PIC_DESC; + } + + acldvppSetPicDescData(encodeInputDesc_, + reinterpret_cast(inputImage.data.get())); + acldvppSetPicDescFormat(encodeInputDesc_, inputImage.format); + acldvppSetPicDescWidth(encodeInputDesc_, inputImage.width); + acldvppSetPicDescHeight(encodeInputDesc_, inputImage.height); + acldvppSetPicDescWidthStride(encodeInputDesc_, alignWidth); + acldvppSetPicDescHeightStride(encodeInputDesc_, alignHeight); + acldvppSetPicDescSize(encodeInputDesc_, inputBufferSize); + + return ATLAS_OK; +} + +AtlasError DvppJpegE::InitJpegEResource(ImageData& inputImage) { + uint32_t encodeLevel = 100; // default optimal level (0-100) + + AtlasError ret = InitEncodeInputDesc(inputImage); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("Dvpp jpege init input desc failed"); + return ret; + } + + jpegeConfig_ = acldvppCreateJpegeConfig(); + acldvppSetJpegeConfigLevel(jpegeConfig_, encodeLevel); + + acldvppJpegPredictEncSize(encodeInputDesc_, jpegeConfig_, 
&encodeOutBufferSize_); + aclError aclRet = acldvppMalloc(&encodeOutBufferDev_, encodeOutBufferSize_); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Malloc dvpp memory error(%d)", aclRet); + return ATLAS_ERROR_MALLOC_DVPP; + } + + return ATLAS_OK; +} + +AtlasError DvppJpegE::Process(ImageData& destJpegImage, ImageData& srcYuvImage) +{ + AtlasError ret = InitJpegEResource(srcYuvImage); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("Dvpp jpege failed for init error"); + return ret; + } + + aclError aclRet = acldvppJpegEncodeAsync(dvppChannelDesc_, + encodeInputDesc_, + encodeOutBufferDev_, + &encodeOutBufferSize_, + jpegeConfig_, stream_); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Dvpp jpege async failed, error:%d", aclRet); + return ATLAS_ERROR_JPEGE_ASYNC; + } + + aclRet = aclrtSynchronizeStream(stream_); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Dvpp jpege sync stream failed, error:%d", aclRet); + return ATLAS_ERROR_SYNC_STREAM; + } + + destJpegImage.width = srcYuvImage.width; + destJpegImage.height = srcYuvImage.height; + destJpegImage.size = encodeOutBufferSize_; + destJpegImage.data.reset((uint8_t*)encodeOutBufferDev_, + [](uint8_t* p) { acldvppFree(p); }); + + DestroyEncodeResource(); + + return ATLAS_OK; +} + +void DvppJpegE::DestroyEncodeResource() +{ + if (jpegeConfig_ != nullptr) { + (void)acldvppDestroyJpegeConfig(jpegeConfig_); + jpegeConfig_ = nullptr; + } + + if (encodeInputDesc_ != nullptr) { + (void)acldvppDestroyPicDesc(encodeInputDesc_); + encodeInputDesc_ = nullptr; + } +} diff --git a/cplusplus/common/atlasutil/src/dvpp_jpege.h b/cplusplus/common/atlasutil/src/dvpp_jpege.h new file mode 100644 index 0000000000000000000000000000000000000000..54d2c4fd8dadf96d0d08e23d679d884bc2e67b78 --- /dev/null +++ b/cplusplus/common/atlasutil/src/dvpp_jpege.h @@ -0,0 +1,66 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in 
compliance with the License. +* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. + +* File dvpp_process.h +* Description: handle dvpp process +*/ +#pragma once +#include + +#include "acl/acl.h" +#include "acl/ops/acl_dvpp.h" +#include "atlas_utils.h" +#include "dvpp_process.h" + +class DvppJpegE{ +public: + /** + * @brief Constructor + * @param [in] stream: stream + */ + DvppJpegE(aclrtStream &stream, acldvppChannelDesc* dvppChannelDesc); + + /** + * @brief Destructor + */ + ~DvppJpegE(); + + /** + * @brief process encode + * @return result + */ + AtlasError Process(ImageData& destJpegImage, ImageData& srcYuvImage); + + /** + * @brief release encode resource + */ + void DestroyEncodeResource(); + +private: + AtlasError InitJpegEResource(ImageData& inputImage); + AtlasError InitEncodeInputDesc(ImageData& inputImage); + void DestroyResource(); + void DestroyOutputPara(); + + aclrtStream stream_; + acldvppChannelDesc* dvppChannelDesc_; + + acldvppJpegeConfig* jpegeConfig_; + + uint32_t encodeOutBufferSize_; + void* encodeOutBufferDev_; // encode output buffer + acldvppPicDesc* encodeInputDesc_; //encode input desc +}; + diff --git a/cplusplus/common/atlasutil/src/dvpp_mem_mgr.cpp b/cplusplus/common/atlasutil/src/dvpp_mem_mgr.cpp new file mode 100644 index 0000000000000000000000000000000000000000..670f30b03ee78cb29b4f0fa5945e756e7ca7c5e0 --- /dev/null +++ b/cplusplus/common/atlasutil/src/dvpp_mem_mgr.cpp @@ -0,0 +1,288 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ +#include +#include +#include +#include +#include +#include +#include +#include +#include "atlas_utils.h" +#include "dvpp_mem_mgr.h" + + +using namespace std; + +namespace { + const uint32_t kDvppMemAlign = 128; + const uint32_t kDefaultBlockNum = 2048; + const uint32_t kLevelInvalid = kLevelNum; + const uint32_t kPoolIdInvalid = kPerLevelPoolsNum; + const uint32_t kLevelSmallMax = 512; + const uint32_t kLevelLargeMax = 64; + + const uint32_t kLevel4k = 4 * 1024; + const uint32_t kLevel16k = 16 * 1024; + const uint32_t kLevel64k = 64 * 1024; + const uint32_t kLevel256k = 256 * 1024; + const uint32_t kLevel512k = 512 * 1024; + const uint32_t kLevel1024k = 1024 * 1024; + const uint32_t kLevel2m = 2 * 1024 * 1024; + const uint32_t kLevel4m = 4 * 1024 * 1024; + const uint32_t kLevel8m = 8 * 1024 * 1024; +} + + +DvppMemPool::DvppMemPool() : +blockNum_(0), +freeList_(kDefaultBlockNum) { +} + +DvppMemPool::~DvppMemPool() { + std::lock_guard lock(mutex_lock_); + do { + void* buffer = freeList_.Pop(); + if (!buffer) { + break; + } + acldvppFree(buffer); + blockNum_--; + }while(1); +} + +void* DvppMemPool::MallocMem(uint32_t size, bool& isNewBlock) { + isNewBlock = false; + std::lock_guard lock(mutex_lock_); + void* buffer = freeList_.Pop(); + if (!buffer) { + aclError aclRet = acldvppMalloc(&buffer, size); + if ((aclRet != ACL_ERROR_NONE) || (buffer == nullptr)) { + ATLAS_LOG_ERROR("Acl dvpp malloc return error %d", aclRet); + return nullptr; + } + blockNum_++; + isNewBlock = true; + } + + usedList_.push_back(buffer); + + return buffer; +} + +void DvppMemPool::FreeMem(void* ptr) { + std::lock_guard lock(mutex_lock_); + list::iterator it = usedList_.begin(); + while (it != usedList_.end()) { + if (*it == ptr) { + usedList_.erase(it); + freeList_.Push(ptr); + return; + } + it++; + } + + ATLAS_LOG_ERROR("Free memory %p in pool failed", ptr); + it = usedList_.begin(); + while (it != 
usedList_.end()) { + it++; + } + + return; +} + +DvppMemPoolMgr::DvppMemPoolMgr() { + levelList_[0].size = kLevel4k; + levelList_[0].maxBlockNum = kLevelSmallMax; + + levelList_[1].size = kLevel16k; + levelList_[1].maxBlockNum = kLevelSmallMax; + + levelList_[2].size = kLevel64k; + levelList_[2].maxBlockNum = kLevelSmallMax; + + levelList_[3].size = kLevel256k; + levelList_[3].maxBlockNum = kLevelSmallMax; + + levelList_[4].size = kLevel512k; + levelList_[4].maxBlockNum = kLevelSmallMax; + + levelList_[5].size = kLevel1024k; + levelList_[5].maxBlockNum = kLevelSmallMax; + + levelList_[6].size = kLevel2m; + levelList_[6].maxBlockNum = kLevelLargeMax; + + levelList_[7].size = kLevel4m; + levelList_[7].maxBlockNum = 512; + + levelList_[8].size = kLevel8m; + levelList_[8].maxBlockNum = kLevelLargeMax; +} + +void* DvppMemPoolMgr::MallocMem(uint32_t size) { + uint32_t wrapSize = size + kDvppMemAlign; + uint32_t level = ChooseLevel(wrapSize); + if (level >= kLevelInvalid) { + return MallocDvppMemory(wrapSize); + } + + + uint32_t index = ChoosePool(level); + if (index >= kPoolIdInvalid) { + return MallocDvppMemory(wrapSize); + } + + bool isNewBlock = false; + void* buffer = levelList_[level].poolTbl[index].MallocMem(wrapSize, isNewBlock); + if (!buffer) { + return nullptr; + } + + if (isNewBlock) { + levelList_[level].totalBlockNum++; + } + + return WrapMem(buffer, level, index, MEM_POOL_MALLOC, size); +} + +void* DvppMemPoolMgr::MallocDvppMemory(uint32_t size) { + void* buffer = nullptr; + aclError aclRet = acldvppMalloc(&buffer, size); + if ((aclRet != ACL_ERROR_NONE) || (buffer == nullptr)) { + ATLAS_LOG_ERROR("Acl dvpp malloc return error %d", aclRet); + return nullptr; + } + return WrapMem(buffer, kLevelInvalid, 0, MEM_ACL_MALLOC, size); +} + +void* DvppMemPoolMgr::WrapMem(void* buffer, uint32_t level, + uint32_t poolId, MallocType mallocType, uint32_t size) { + MallocInfo *info = (MallocInfo *)buffer; + info->level = level; + info->poolId = poolId; + info->size = 
size; + info->mallocType = mallocType; + return (void *)((uint8_t*)buffer + kDvppMemAlign); +} + +void DvppMemPoolMgr::FreeMem(void* ptr) { + void* buffer = (void *)((uint8_t *)ptr - kDvppMemAlign); + MallocInfo *info = (MallocInfo *)((uint8_t *)ptr - kDvppMemAlign); + if (info->mallocType == MEM_ACL_MALLOC) { + acldvppFree(buffer); + return; + } + + DvppMemPool& pool = levelList_[info->level].poolTbl[info->poolId]; + pool.FreeMem(buffer); + + return; +} + +uint32_t DvppMemPoolMgr::ChooseLevel(uint32_t size) { + for (uint32_t i = 0; i < kLevelNum; i++) { + if (size < levelList_[i].size) { + return i; + } + } + + return kLevelInvalid; +} + +uint32_t DvppMemPoolMgr::ChoosePool(uint32_t level) { + uint32_t index = 0; + PoolLevel& poolLevel = levelList_[level]; + if (poolLevel.totalBlockNum >= poolLevel.maxBlockNum) { + uint32_t maxFreeNum = poolLevel.poolTbl[0].FreeBlockNum(); + for (uint32_t i = 1; i < kPerLevelPoolsNum; i++) { + if (maxFreeNum < poolLevel.poolTbl[i].FreeBlockNum()) { + maxFreeNum = poolLevel.poolTbl[i].FreeBlockNum(); + index = i; + } + if (maxFreeNum == 0) { + index = kPoolIdInvalid; + } + } + } else { + uint32_t minBlockNum = poolLevel.poolTbl[0].BlockNum(); + for (uint32_t i = 1; i < kPerLevelPoolsNum; i++) { + if (minBlockNum > poolLevel.poolTbl[i].BlockNum()) { + minBlockNum = poolLevel.poolTbl[i].BlockNum(); + index = i; + } + } + } + + return index; +} + +void DvppMemPoolMgr::PrintPoolInfo() { + printf("Mem pool: total malloc times %d, total free times %d\n", + totalMallocTimes_, totalFreeTimes_); + for (uint32_t i = 0; i < kLevelNum; i++) { + if (levelList_[i].totalBlockNum == 0) + continue; + + printf("Level: %d\n", levelList_[i].size); + for (uint32_t j = 0; j < kPerLevelPoolsNum; j++) { + DvppMemPool& pool = levelList_[i].poolTbl[j]; + if (pool.BlockNum() == 0) + continue; + + printf("pool %d: block num %d, free:%d, used: %d\n", + j, pool.BlockNum(), pool.FreeBlockNum(), + pool.UsedBlockNum()); + } + } +} + + +void* 
AtlasDvppMalloc(uint32_t size) { + DvppMemPoolMgr& inst = DvppMemPoolMgr::GetInstance(); + inst.StatisticMalloc(); + return inst.MallocMem(size); +} + +void AtlasDvppFree(void* ptr) { + DvppMemPoolMgr& inst = DvppMemPoolMgr::GetInstance(); + inst.StatisticFree(); + return inst.FreeMem(ptr); +} + +void PrintDvppMgrInfo() { + DvppMemPoolMgr& inst = DvppMemPoolMgr::GetInstance(); + inst.PrintPoolInfo(); +} + + diff --git a/cplusplus/common/atlasutil/src/dvpp_mem_mgr.h b/cplusplus/common/atlasutil/src/dvpp_mem_mgr.h new file mode 100644 index 0000000000000000000000000000000000000000..c9a97652c372759cafa2f2c7cb319445bd8e42a3 --- /dev/null +++ b/cplusplus/common/atlasutil/src/dvpp_mem_mgr.h @@ -0,0 +1,124 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * ============================================================================ + */ +#pragma once + +#include +#include "acl/acl.h" +#include "acl/ops/acl_dvpp.h" + +#include "thread_safe_queue.h" +#include "atlas_error.h" +#include "dvpp_mem_mgr.h" + +namespace { + const uint32_t kLevelNum = 9; + const uint32_t kPerLevelPoolsNum = 4; +} + +enum MallocType { + MEM_ACL_MALLOC = 0, + MEM_POOL_MALLOC, + UNKNOW_MALLOC_TYPE, +}; + +struct MallocInfo { + uint32_t level; + uint32_t poolId; + uint32_t size; + MallocType mallocType; +}; + +class DvppMemPool { +public: + DvppMemPool(); + ~DvppMemPool(); + + void* MallocMem(uint32_t size, bool& isNewBlock); + void FreeMem(void* ptr); + + uint32_t BlockNum() { return blockNum_; } + uint32_t FreeBlockNum() { return freeList_.Size(); } + uint32_t UsedBlockNum() { return usedList_.size(); } +private: + uint32_t blockNum_; + ThreadSafeQueue freeList_; + std::list usedList_; + mutable std::mutex mutex_lock_; +}; + +struct PoolLevel { + uint32_t size; + uint32_t maxBlockNum; + uint32_t totalBlockNum = 0; + DvppMemPool poolTbl[kPerLevelPoolsNum]; +}; + +class DvppMemPoolMgr { +public: + DvppMemPoolMgr(); + + DvppMemPoolMgr(const DvppMemPoolMgr&) = delete; + DvppMemPoolMgr& operator=(const DvppMemPoolMgr&) = delete; + + static DvppMemPoolMgr& GetInstance() { + static DvppMemPoolMgr instance; + return instance; + } + + ~DvppMemPoolMgr() {}; + + void* MallocMem(uint32_t size); + void FreeMem(void* 
ptr); + + void StatisticMalloc() { totalMallocTimes_++; } + void StatisticFree() { totalFreeTimes_++; } + void PrintPoolInfo(); + +private: + uint32_t ChooseLevel(uint32_t size); + uint32_t ChoosePool(uint32_t level); + void* MallocDvppMemory(uint32_t size); + void* WrapMem(void* buffer, uint32_t level, + uint32_t poolId, MallocType mallocType, uint32_t size); + +private: + PoolLevel levelList_[kLevelNum]; + + uint32_t totalMallocTimes_; + uint32_t totalFreeTimes_; +}; + +void* AtlasDvppMalloc(uint32_t size); +void AtlasDvppFree(void* ptr); +void PrintDvppMgrInfo(); + diff --git a/cplusplus/common/atlasutil/src/dvpp_process.cpp b/cplusplus/common/atlasutil/src/dvpp_process.cpp new file mode 100644 index 0000000000000000000000000000000000000000..85c80ccd32406a753a257c0e69997ce95f5f6ab2 --- /dev/null +++ b/cplusplus/common/atlasutil/src/dvpp_process.cpp @@ -0,0 +1,115 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+ +* File dvpp_process.cpp +* Description: handle dvpp process +*/ + +#include +#include "acl/acl.h" +#include "dvpp_resize.h" +#include "dvpp_jpegd.h" +#include "dvpp_jpege.h" +#include "dvpp_process.h" +#include "dvpp_cropandpaste.h" + +using namespace std; + +DvppProcess::DvppProcess() : +isInitOk_(false), stream_(nullptr), +dvppChannelDesc_(nullptr), isReleased_(false) { +} + +DvppProcess::~DvppProcess() { + DestroyResource(); +} + +void DvppProcess::DestroyResource() { + if (isReleased_) { + return; + } + + aclError aclRet; + + if (dvppChannelDesc_ != nullptr) { + aclRet = acldvppDestroyChannel(dvppChannelDesc_); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Destroy dvpp channel error: %d", aclRet); + } + + (void)acldvppDestroyChannelDesc(dvppChannelDesc_); + dvppChannelDesc_ = nullptr; + } + + if (stream_ != nullptr) { + aclRet = aclrtDestroyStream(stream_); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Vdec destroy stream failed, error %d", aclRet); + } + stream_ = nullptr; + } + + isReleased_ = true; +} + +AtlasError DvppProcess::Init() { + aclError aclRet = aclrtCreateStream(&stream_); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Create venc stream failed, error %d", aclRet); + return ATLAS_ERROR_CREATE_STREAM; + } + + dvppChannelDesc_ = acldvppCreateChannelDesc(); + if (dvppChannelDesc_ == nullptr) { + ATLAS_LOG_ERROR("Create dvpp channel desc failed"); + return ATLAS_ERROR_CREATE_DVPP_CHANNEL_DESC; + } + + aclRet = acldvppCreateChannel(dvppChannelDesc_); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("acldvppCreateChannel failed, aclRet = %d", aclRet); + return ATLAS_ERRROR_CREATE_DVPP_CHANNEL; + } + + isInitOk_ = true; + ATLAS_LOG_INFO("dvpp init resource ok"); + + return ATLAS_OK; +} + +AtlasError DvppProcess::Resize(ImageData& dest, ImageData& src, + uint32_t width, uint32_t height) { + DvppResize resizeOp(stream_, dvppChannelDesc_, width, height); + return resizeOp.Process(dest, src); +} + +AtlasError 
DvppProcess::JpegD(ImageData& dest, ImageData& src) { + DvppJpegD jpegD(stream_, dvppChannelDesc_); + return jpegD.Process(dest, src); +} + +AtlasError DvppProcess::Crop(ImageData& dest, ImageData& src, + uint32_t ltHorz, uint32_t ltVert, + uint32_t rbHorz, uint32_t rbVert) { + DvppCropAndPaste crop(stream_, dvppChannelDesc_, + ltHorz, ltVert, rbHorz, rbVert); + return crop.Process(dest, src); +} + +AtlasError DvppProcess::JpegE(ImageData& dest, ImageData& src) { + DvppJpegE jpegE(stream_, dvppChannelDesc_); + return jpegE.Process(dest, src); +} + diff --git a/cplusplus/common/atlasutil/src/dvpp_resize.cpp b/cplusplus/common/atlasutil/src/dvpp_resize.cpp new file mode 100644 index 0000000000000000000000000000000000000000..b9e96e5872ccabe63f0953ea91124d617ab10b40 --- /dev/null +++ b/cplusplus/common/atlasutil/src/dvpp_resize.cpp @@ -0,0 +1,176 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+ +* File dvpp_process.cpp +* Description: handle dvpp process +*/ + +#include +#include "acl/acl.h" +#include "atlas_utils.h" +#include "dvpp_resize.h" + +using namespace std; + +DvppResize::DvppResize(aclrtStream& stream, acldvppChannelDesc *dvppChannelDesc, + uint32_t width, uint32_t height) +: stream_(stream), dvppChannelDesc_(dvppChannelDesc), +resizeConfig_(nullptr), vpcInputDesc_(nullptr), vpcOutputDesc_(nullptr), +inDevBuffer_(nullptr),vpcOutBufferDev_(nullptr),vpcOutBufferSize_(0) { + size_.width = width; + size_.height = height; +} + +DvppResize::~DvppResize() { + DestroyResizeResource(); +} + +AtlasError DvppResize::InitResizeInputDesc(ImageData& inputImage) { + uint32_t alignWidth = ALIGN_UP16(inputImage.width); + uint32_t alignHeight = ALIGN_UP2(inputImage.height); + if (alignWidth == 0 || alignHeight == 0) { + ATLAS_LOG_ERROR("Input image width %d or height %d invalid", + inputImage.width, inputImage.height); + return ATLAS_ERROR_INVALID_ARGS; + } + + uint32_t inputBufferSize = YUV420SP_SIZE(alignWidth, alignHeight); + vpcInputDesc_ = acldvppCreatePicDesc(); + if (vpcInputDesc_ == nullptr) { + ATLAS_LOG_ERROR("Create dvpp pic desc failed"); + return ATLAS_ERROR_CREATE_PIC_DESC; + } + + acldvppSetPicDescData(vpcInputDesc_, inputImage.data.get()); + acldvppSetPicDescFormat(vpcInputDesc_, inputImage.format); + acldvppSetPicDescWidth(vpcInputDesc_, inputImage.width); + acldvppSetPicDescHeight(vpcInputDesc_, inputImage.height); + acldvppSetPicDescWidthStride(vpcInputDesc_, alignWidth); + acldvppSetPicDescHeightStride(vpcInputDesc_, alignHeight); + acldvppSetPicDescSize(vpcInputDesc_, inputBufferSize); + + return ATLAS_OK; +} + +AtlasError DvppResize::InitResizeOutputDesc() +{ + int resizeOutWidth = size_.width; + int resizeOutHeight = size_.height; + int resizeOutWidthStride = ALIGN_UP16(resizeOutWidth); + int resizeOutHeightStride = ALIGN_UP2(resizeOutHeight); + if (resizeOutWidthStride == 0 || resizeOutHeightStride == 0) { + ATLAS_LOG_ERROR("Align resize 
width(%d) and height(%d) failed", + size_.width, size_.height); + return ATLAS_ERROR_INVALID_ARGS; + } + + vpcOutBufferSize_ = YUV420SP_SIZE(resizeOutWidthStride, resizeOutHeightStride); + aclError aclRet = acldvppMalloc(&vpcOutBufferDev_, vpcOutBufferSize_); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Dvpp resize malloc output buffer failed, " + "size %d, error %d", vpcOutBufferSize_, aclRet); + return ATLAS_ERROR_MALLOC_DVPP; + } + + vpcOutputDesc_ = acldvppCreatePicDesc(); + if (vpcOutputDesc_ == nullptr) { + ATLAS_LOG_ERROR("acldvppCreatePicDesc vpcOutputDesc_ failed"); + return ATLAS_ERROR_CREATE_PIC_DESC; + } + + acldvppSetPicDescData(vpcOutputDesc_, vpcOutBufferDev_); + acldvppSetPicDescFormat(vpcOutputDesc_, PIXEL_FORMAT_YUV_SEMIPLANAR_420); + acldvppSetPicDescWidth(vpcOutputDesc_, resizeOutWidth); + acldvppSetPicDescHeight(vpcOutputDesc_, resizeOutHeight); + acldvppSetPicDescWidthStride(vpcOutputDesc_, resizeOutWidthStride); + acldvppSetPicDescHeightStride(vpcOutputDesc_, resizeOutHeightStride); + acldvppSetPicDescSize(vpcOutputDesc_, vpcOutBufferSize_); + + return ATLAS_OK; +} + +AtlasError DvppResize::InitResizeResource(ImageData& inputImage) { + resizeConfig_ = acldvppCreateResizeConfig(); + if (resizeConfig_ == nullptr) { + ATLAS_LOG_ERROR("Dvpp resize init failed for create config failed"); + return ATLAS_ERROR_CREATE_RESIZE_CONFIG; + } + + AtlasError ret = InitResizeInputDesc(inputImage); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("InitResizeInputDesc failed"); + return ret; + } + + ret = InitResizeOutputDesc(); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("InitResizeOutputDesc failed"); + return ret; + } + + return ATLAS_OK; +} + +AtlasError DvppResize::Process(ImageData& resizedImage, ImageData& srcImage) +{ + AtlasError atlRet = InitResizeResource(srcImage); + if (atlRet != ATLAS_OK) { + ATLAS_LOG_ERROR("Dvpp resize failed for init error"); + return atlRet; + } + + // resize pic + aclError aclRet = acldvppVpcResizeAsync(dvppChannelDesc_, 
vpcInputDesc_, + vpcOutputDesc_, resizeConfig_, stream_); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("acldvppVpcResizeAsync failed, error: %d", aclRet); + return ATLAS_ERROR_RESIZE_ASYNC; + } + + aclRet = aclrtSynchronizeStream(stream_); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("resize aclrtSynchronizeStream failed, error: %d", aclRet); + return ATLAS_ERROR_SYNC_STREAM; + } + resizedImage.format = PIXEL_FORMAT_YUV_SEMIPLANAR_420; + resizedImage.width = size_.width; + resizedImage.height = size_.height; + resizedImage.alignWidth = ALIGN_UP16(size_.width); + resizedImage.alignHeight = ALIGN_UP2(size_.height); + resizedImage.size = vpcOutBufferSize_; + resizedImage.data = SHARED_PRT_DVPP_BUF(vpcOutBufferDev_); + + DestroyResizeResource(); + + return ATLAS_OK; +} + +void DvppResize::DestroyResizeResource() +{ + if (resizeConfig_ != nullptr) { + (void)acldvppDestroyResizeConfig(resizeConfig_); + resizeConfig_ = nullptr; + } + + if (vpcInputDesc_ != nullptr) { + (void)acldvppDestroyPicDesc(vpcInputDesc_); + vpcInputDesc_ = nullptr; + } + + if (vpcOutputDesc_ != nullptr) { + (void)acldvppDestroyPicDesc(vpcOutputDesc_); + vpcOutputDesc_ = nullptr; + } +} diff --git a/cplusplus/common/atlasutil/src/dvpp_resize.h b/cplusplus/common/atlasutil/src/dvpp_resize.h new file mode 100644 index 0000000000000000000000000000000000000000..315d270ac57b12d93813ed7d394c114f45ddafed --- /dev/null +++ b/cplusplus/common/atlasutil/src/dvpp_resize.h @@ -0,0 +1,89 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+* See the License for the specific language governing permissions and +* limitations under the License. + +* File dvpp_process.h +* Description: handle dvpp process +*/ +#pragma once +#include + +#include "acl/acl.h" +#include "acl/ops/acl_dvpp.h" +#include "atlas_utils.h" + +class DvppResize { +public: + /** + * @brief Constructor + * @param [in] stream: stream + */ + DvppResize(aclrtStream &stream, acldvppChannelDesc *dvppChannelDesc, + uint32_t width, uint32_t height); + + /** + * @brief Destructor + */ + ~DvppResize(); + + /** + * @brief dvpp global init + * @return result + */ + AtlasError InitResource(); + + /** + * @brief init dvpp output para + * @param [in] modelInputWidth: model input width + * @param [in] modelInputHeight: model input height + * @return result + */ + AtlasError InitOutputPara(int modelInputWidth, int modelInputHeight); + + /** + * @brief gett dvpp output + * @param [in] outputBuffer: pointer which points to dvpp output buffer + * @param [out] outputSize: output size + */ + void GetOutput(void **outputBuffer, int &outputSize); + + /** + * @brief dvpp process + * @return result + */ + AtlasError Process(ImageData& resizedImage, ImageData& srcImage); + +private: + AtlasError InitResizeResource(ImageData& inputImage); + AtlasError InitResizeInputDesc(ImageData& inputImage); + AtlasError InitResizeOutputDesc(); + + void DestroyResizeResource(); + void DestroyResource(); + void DestroyOutputPara(); + + aclrtStream stream_; + acldvppChannelDesc *dvppChannelDesc_; + + acldvppResizeConfig *resizeConfig_; + + acldvppPicDesc *vpcInputDesc_; // vpc input desc + acldvppPicDesc *vpcOutputDesc_; // vpc output desc + + uint8_t *inDevBuffer_; // input pic dev buffer + void *vpcOutBufferDev_; // vpc output buffer + uint32_t vpcOutBufferSize_; // vpc output size + Resolution size_; +}; + diff --git a/cplusplus/common/atlasutil/src/ffmpeg_decoder.cpp b/cplusplus/common/atlasutil/src/ffmpeg_decoder.cpp new file mode 100644 index 
0000000000000000000000000000000000000000..dd626b12dc78aa5ee048d613541d0d20bcae9836 --- /dev/null +++ b/cplusplus/common/atlasutil/src/ffmpeg_decoder.cpp @@ -0,0 +1,312 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + + + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "atlas_utils.h" +#include "ffmpeg_decoder.h" + +using namespace std; + +namespace { + const int kNoFlag = 0; // no flag + + const int kInvalidVideoIndex = -1; // invalid video index + + const string kRtspTransport = "rtspTransport"; // rtsp transport + + const string kUdp = "udp"; // video format udp + + const string kTcp = "tcp"; + + const string kBufferSize = "buffer_size"; // buffer size string + + const string kMaxBufferSize = "10485760"; // maximum buffer size:10MB + + const string kMaxDelayStr = "max_delay"; // maximum delay string + + const string kMaxDelayValue = "100000000"; // maximum delay time:100s + + const string kTimeoutStr = "stimeout"; // timeout string + + const string kTimeoutValue = "5000000"; // timeout:5s + + const string kPktSize = "pkt_size"; // ffmpeg pakect size string + + const string kPktSizeValue = "10485760"; // ffmpeg packet size value:10MB + + const string kReorderQueueSize = "reorder_queue_size"; // reorder queue size + + const string kReorderQueueSizeValue = "0"; // reorder queue size value + + const int kErrorBufferSize = 1024; // buffer size for error info + + const uint32_t kDefaultFps = 5; + + const uint32_t kOneSecUs = 1000 * 1000; +} + +FFmpegDecoder::FFmpegDecoder(const std::string& streamName) +:streamName_(streamName){ + rtspTransport_.assign(kTcp.c_str()); + isFinished_ = false; + isStop_ = false; + GetVideoInfo(); +} + +void FFmpegDecoder::SetTransport(const std::string& transportType) { + rtspTransport_.assign(transportType.c_str()); +}; + +int FFmpegDecoder::GetVideoIndex(AVFormatContext* avFormatContext) { + if (avFormatContext == nullptr) { // verify input pointer + return kInvalidVideoIndex; + } + + // get video index in streams + for (uint32_t i = 0; i < avFormatContext->nb_streams; i++) { + if 
(avFormatContext->streams[i]->codecpar->codec_type + == AVMEDIA_TYPE_VIDEO) { // check is media type is video + return i; + } + } + + return kInvalidVideoIndex; +} + +void FFmpegDecoder::InitVideoStreamFilter(const AVBitStreamFilter*& videoFilter) { + if (videoType_ == AV_CODEC_ID_H264) { // check video type is h264 + videoFilter = av_bsf_get_by_name("h264_mp4toannexb"); + } + else { // the video type is h265 + videoFilter = av_bsf_get_by_name("hevc_mp4toannexb"); + } +} + +void FFmpegDecoder::SetDictForRtsp(AVDictionary*& avdic) { + ATLAS_LOG_INFO("Set parameters for %s", streamName_.c_str()); + + av_dict_set(&avdic, kRtspTransport.c_str(), rtspTransport_.c_str(), kNoFlag); + av_dict_set(&avdic, kBufferSize.c_str(), kMaxBufferSize.c_str(), kNoFlag); + av_dict_set(&avdic, kMaxDelayStr.c_str(), kMaxDelayValue.c_str(), kNoFlag); + av_dict_set(&avdic, kTimeoutStr.c_str(), kTimeoutValue.c_str(), kNoFlag); + av_dict_set(&avdic, kReorderQueueSize.c_str(), + kReorderQueueSizeValue.c_str(), kNoFlag); + av_dict_set(&avdic, kPktSize.c_str(), kPktSizeValue.c_str(), kNoFlag); + ATLAS_LOG_INFO("Set parameters for %s end", streamName_.c_str()); +} + +bool FFmpegDecoder::OpenVideo(AVFormatContext*& avFormatContext) { + bool ret = true; + AVDictionary* avdic = nullptr; + + av_log_set_level(AV_LOG_DEBUG); + + ATLAS_LOG_INFO("Open video %s ...", streamName_.c_str()); + SetDictForRtsp(avdic); + int openRet = avformat_open_input(&avFormatContext, + streamName_.c_str(), nullptr, + &avdic); + if (openRet < 0) { // check open video result + char buf_error[kErrorBufferSize]; + av_strerror(openRet, buf_error, kErrorBufferSize); + + ATLAS_LOG_ERROR("Could not open video:%s, return :%d, error info:%s", + streamName_.c_str(), openRet, buf_error); + ret = false; + } + + if (avdic != nullptr) { // free AVDictionary + av_dict_free(&avdic); + } + + return ret; +} + +bool FFmpegDecoder::InitVideoParams(int videoIndex, + AVFormatContext* avFormatContext, + AVBSFContext*& bsfCtx) { + const 
AVBitStreamFilter* videoFilter; + InitVideoStreamFilter(videoFilter); + if (videoFilter == nullptr) { // check video fileter is nullptr + ATLAS_LOG_ERROR("Unkonw bitstream filter, videoFilter is nullptr!"); + return false; + } + + // checke alloc bsf context result + if (av_bsf_alloc(videoFilter, &bsfCtx) < 0) { + ATLAS_LOG_ERROR("Fail to call av_bsf_alloc!"); + return false; + } + + // check copy parameters result + if (avcodec_parameters_copy(bsfCtx->par_in, + avFormatContext->streams[videoIndex]->codecpar) < 0) { + ATLAS_LOG_ERROR("Fail to call avcodec_parameters_copy!"); + return false; + } + + bsfCtx->time_base_in = avFormatContext->streams[videoIndex]->time_base; + + // check initialize bsf contextreult + if (av_bsf_init(bsfCtx) < 0) { + ATLAS_LOG_ERROR("Fail to call av_bsf_init!"); + return false; + } + + return true; +} + +void FFmpegDecoder::Decode(FrameProcessCallBack callback, + void *callbackParam) { + ATLAS_LOG_INFO("Start ffmpeg decode video %s ...", streamName_.c_str()); + avformat_network_init(); // init network + + AVFormatContext* avFormatContext = avformat_alloc_context(); + + // check open video result + if (!OpenVideo(avFormatContext)) { + return; + } + + int videoIndex = GetVideoIndex(avFormatContext); + if (videoIndex == kInvalidVideoIndex) { // check video index is valid + ATLAS_LOG_ERROR("Rtsp %s index is -1", streamName_.c_str()); + return; + } + + AVBSFContext* bsfCtx = nullptr; + // check initialize video parameters result + if (!InitVideoParams(videoIndex, avFormatContext, bsfCtx)) { + return; + } + + ATLAS_LOG_INFO("Start decode frame of video %s ...", streamName_.c_str()); + + AVPacket avPacket; + int processOk = true; + // loop to get every frame from video stream + while ((av_read_frame(avFormatContext, &avPacket) == 0) && processOk && !isStop_) { + if (avPacket.stream_index == videoIndex) { // check current stream is video + // send video packet to ffmpeg + if (av_bsf_send_packet(bsfCtx, &avPacket)) { + ATLAS_LOG_ERROR("Fail to 
call av_bsf_send_packet, channel id:%s", + streamName_.c_str()); + } + + // receive single frame from ffmpeg + while ((av_bsf_receive_packet(bsfCtx, &avPacket) == 0) && !isStop_) { + int ret = callback(callbackParam, avPacket.data, avPacket.size); + if (ret != 0) { + processOk = false; + break; + } + } + } + av_packet_unref(&avPacket); + } + + av_bsf_free(&bsfCtx); // free AVBSFContext pointer + avformat_close_input(&avFormatContext); // close input video + + isFinished_ = true; + ATLAS_LOG_INFO("Ffmpeg decoder %s finished", streamName_.c_str()); +} + +void FFmpegDecoder::GetVideoInfo() { + avformat_network_init(); // init network + AVFormatContext* avFormatContext = avformat_alloc_context(); + bool ret = OpenVideo(avFormatContext); + if (ret == false) { + ATLAS_LOG_ERROR("Open %s failed", streamName_.c_str()); + return; + } + + if (avformat_find_stream_info(avFormatContext,NULL)<0) { + ATLAS_LOG_ERROR("Get stream info of %s failed", streamName_.c_str()); + return; + } + + int videoIndex = GetVideoIndex(avFormatContext); + if (videoIndex == kInvalidVideoIndex) { // check video index is valid + ATLAS_LOG_ERROR("Video index is %d, current media stream has no " + "video info:%s", + kInvalidVideoIndex, streamName_.c_str()); + + avformat_close_input(&avFormatContext); + return; + } + + AVStream* inStream = avFormatContext->streams[videoIndex]; + + frameWidth_ = inStream->codecpar->width; + frameHeight_ = inStream->codecpar->height; + if (inStream->avg_frame_rate.den) { + fps_ = inStream->avg_frame_rate.num / inStream->avg_frame_rate.den; + } + else { + fps_ = kDefaultFps; + } + + videoType_ = inStream->codecpar->codec_id; + profile_ = inStream->codecpar->profile; + + avformat_close_input(&avFormatContext); + + ATLAS_LOG_INFO("Video %s, type %d, profile %d, width:%d, height:%d, fps:%d", + streamName_.c_str(), videoType_, profile_, frameWidth_, frameHeight_, fps_); + return; +} + + + + + + + + + + diff --git a/cplusplus/common/atlasutil/src/ffmpeg_decoder.h 
b/cplusplus/common/atlasutil/src/ffmpeg_decoder.h new file mode 100644 index 0000000000000000000000000000000000000000..486d9ae05ea05aa7c79f54ebb0b806541eee65f2 --- /dev/null +++ b/cplusplus/common/atlasutil/src/ffmpeg_decoder.h @@ -0,0 +1,94 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#ifndef VIDEO_FRAME_DECODE_H_ +#define VIDEO_fRAME_DECODE_H_ + +#include +#include +#include + +#include +#include +#include +#include +#include + +extern "C" { +#include +#include +} + +typedef int (*FrameProcessCallBack)(void* callback_param, void *frame_data, + int frame_size); + +class FFmpegDecoder{ +public: + FFmpegDecoder(const std::string& name); + ~FFmpegDecoder(){} + + void Decode(FrameProcessCallBack callback_func, void *callback_param); + int GetFrameWidth() { return frameWidth_; } + int GetFrameHeight() { return frameHeight_; } + int GetVideoType() { return videoType_; } + int GetFps() { return fps_; } + int IsFinished() { return isFinished_; }; + int GetProfile() { return profile_; } + + void SetTransport(const std::string& transportType); + void StopDecode(){ isStop_ = true; } + +private: + int GetVideoIndex(AVFormatContext* av_format_context); + void GetVideoInfo(); + void InitVideoStreamFilter(const AVBitStreamFilter* &video_filter); + bool OpenVideo(AVFormatContext*& av_format_context); + void SetDictForRtsp(AVDictionary* &avdic); + bool InitVideoParams(int videoIndex, + AVFormatContext* av_format_context, + AVBSFContext* &bsf_ctx); + +private: + bool isFinished_; + int frameWidth_; + int frameHeight_; + int videoType_; + int profile_; + int fps_; + std::string streamName_; + std::string rtspTransport_; + + bool isStop_; +}; + +#endif /* VIDEO_DECODE_H_ */ diff --git a/cplusplus/common/atlasutil/src/parse_config.cpp b/cplusplus/common/atlasutil/src/parse_config.cpp new file mode 100644 index 0000000000000000000000000000000000000000..2a5e668fe3cf506301950cd9f4088a123defc4d9 --- /dev/null +++ b/cplusplus/common/atlasutil/src/parse_config.cpp @@ -0,0 +1,104 @@ +#include "parse_config.h" + +#include +#include +#include +#include + +using namespace std; + +#define COMMENT_CHAR '#' +#define EQUALS_CHAR '=' +#define BLANK_SPACE_CHAR ' ' +#define TABLE_CHAR '\t' + 
// Returns true when c is a blank character (space or tab).
bool IsSpace(char c)
{
    return c == ' ' || c == '\t';
}

// Strips leading and trailing blanks (space/tab) from str in place.
// Fix: the original backward scan used an unsigned index with `i >= 0`,
// a condition that is always true; find_first/last_not_of avoids the
// underflow hazard entirely.
void Trim(std::string& str)
{
    const std::string::size_type startPos = str.find_first_not_of(" \t");
    if (startPos == std::string::npos) { // empty or all blank
        str.clear();
        return;
    }
    const std::string::size_type endPos = str.find_last_not_of(" \t");
    str = str.substr(startPos, endPos - startPos + 1);
}

// Parses one "key = value" line. '#' starts a comment; everything after it
// is ignored. Returns false for empty lines, pure comments, lines without
// '=', or lines whose key trims to empty.
// Fix: the original computed the comment-stripped substring as
// substr(start_pos, start_pos + 1 - end_pos); the length operand is
// negative for any '#' at column >= 2, wraps to a huge size_t, and the
// comment was therefore never stripped. The same inverted arithmetic
// corrupted the extracted value.
bool AnalyseLine(const std::string & line, std::string & key, std::string & value)
{
    if (line.empty()) {
        return false;
    }

    std::string::size_type pos = line.find('#');
    if (pos == 0) { // the first character is '#': whole line is a comment
        return false;
    }
    const std::string newLine =
        (pos == std::string::npos) ? line : line.substr(0, pos);

    pos = newLine.find('=');
    if (pos == std::string::npos) { // has no '='
        return false;
    }

    key = newLine.substr(0, pos);
    value = newLine.substr(pos + 1);

    Trim(key);
    if (key.empty()) {
        return false;
    }
    Trim(value);
    return true;
}

// Loads configFile into config (cleared first). Returns false when the file
// cannot be opened; malformed lines are skipped silently.
bool ReadConfig(std::map<std::string, std::string>& config, const char* configFile)
{
    config.clear();
    std::ifstream infile(configFile);
    if (!infile) {
        std::cout << "file open error" << std::endl;
        return false;
    }
    std::string line;
    std::string key;
    std::string value;
    while (std::getline(infile, line)) {
        if (AnalyseLine(line, key, value)) {
            config[key] = value;
        }
    }

    infile.close();
    return true;
}

// Prints every key=value pair to stdout, one per line.
void PrintConfig(const std::map<std::string, std::string>& config)
{
    for (auto mIter = config.begin(); mIter != config.end(); ++mIter) {
        std::cout << mIter->first << "=" << mIter->second << std::endl;
    }
}

/* diff --git a/cplusplus/common/atlasutil/src/vdec_process.cpp b/cplusplus/common/atlasutil/src/vdec_process.cpp new file mode 100644 index 0000000000000000000000000000000000000000..30047c308d0c08f7b1517bf261b06e80261ea6d6 --- /dev/null +++ */
b/cplusplus/common/atlasutil/src/vdec_process.cpp @@ -0,0 +1,339 @@ +/** +* @file vdec_process.cpp +* +* Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. +* +* This program is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +*/ +#include "atlas_utils.h" +#include "vdec_process.h" + +using namespace std; + +namespace { + const uint32_t kFrameWidthMax = 4096; + const uint32_t kFrameHeightMax = 4096; +} + +bool g_ReportExit = false; + +VdecProcess::VdecProcess(int channelId, uint32_t width, uint32_t height, + int type, aclvdecCallback callback, uint32_t outFormat) +:channelId_(channelId) +,format_(outFormat) +,enType_(type) +,frameWidth_(width) +,frameHeight_(height) +,callback_(callback) +,isExit_(false) +,isReleased_(false){ + alignWidth_ = ALIGN_UP16(frameWidth_); + alignHeight_ = ALIGN_UP2(frameHeight_); + outputPicSize_ = YUV420SP_SIZE(alignWidth_, alignHeight_); + + vdecChannelDesc_ = nullptr; + inputStreamDesc_ = nullptr; + outputPicDesc_ = nullptr; + outputPicBuf_ = nullptr; + + ATLAS_LOG_INFO("VDEC width %d, height %d", frameWidth_, frameHeight_); +} + +VdecProcess::~VdecProcess(){ + DestroyResource(); +} + +void VdecProcess::DestroyResource(){ + if (isReleased_) return; + + aclError ret; + if (vdecChannelDesc_ != nullptr) { + ret = aclvdecDestroyChannel(vdecChannelDesc_); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Vdec destroy channel failed, errorno: %d", ret); + } + aclvdecDestroyChannelDesc(vdecChannelDesc_); + vdecChannelDesc_ = nullptr; + } + + UnsubscribReportThread(); + + if (stream_ != nullptr) { + ret = aclrtDestroyStream(stream_); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Vdec destroy stream failed"); + } + stream_ = nullptr; + } + + isReleased_ = true; +} + +void* VdecProcess::SubscribeReportThreadFunc(void *arg) { + ATLAS_LOG_INFO("Start vdec subscribe thread..."); + + // Notice: create 
context for this thread + int deviceId = 0; + aclrtContext context = nullptr; + aclError ret = aclrtCreateContext(&context, deviceId); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Vdec subscribe thread create" + " context failed, errorno:%d.", ret); + return (void*)ATLAS_ERROR_CREATE_ACL_CONTEXT; + } + + VdecProcess* vdec = (VdecProcess *)arg; + + while (!vdec->IsExit()) { + // Notice: timeout 1000ms + aclrtProcessReport(1000); + } + + ret = aclrtDestroyContext(context); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Vdec subscribe thread destroy" + " context failed, errorno:%d.", ret); + } + + ATLAS_LOG_INFO("Vdec subscribe thread exit!"); + + return (void*)ATLAS_OK; +} + +void VdecProcess::UnsubscribReportThread() { + if ((subscribeThreadId_ == 0) || (stream_ == nullptr)) return; + + (void)aclrtUnSubscribeReport(static_cast(subscribeThreadId_), + stream_); + // destory thread + isExit_ = true; + + void *res = nullptr; + int joinThreadErr = pthread_join(subscribeThreadId_, &res); + if (joinThreadErr) { + ATLAS_LOG_ERROR("Join thread failed, threadId = %lu, err = %d", + subscribeThreadId_, joinThreadErr); + } else { + if ((uint64_t)res != 0) { + ATLAS_LOG_ERROR("thread run failed. 
ret is %lu.", (uint64_t)res); + } + } + ATLAS_LOG_INFO("Destory report thread success."); +} + +AtlasError VdecProcess::Init() { + ATLAS_LOG_INFO("Vdec process init start..."); + + aclError aclRet = aclrtCreateStream(&stream_); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Vdec create stream failed, errorno:%d", aclRet); + return ATLAS_ERROR_CREATE_STREAM; + } + ATLAS_LOG_INFO("Vdec create stream ok"); + + int ret = pthread_create(&subscribeThreadId_, nullptr, + SubscribeReportThreadFunc, (void *)this); + if (ret) { + ATLAS_LOG_ERROR("Start vdec subscribe thread failed, return:%d", ret); + return ATLAS_ERROR_CREATE_THREAD; + } + (void)aclrtSubscribeReport(static_cast(subscribeThreadId_), + stream_); + + ret = CreateVdecChannelDesc(); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("Create vdec channel failed"); + return ret; + } + + return ATLAS_OK; +} + +AtlasError VdecProcess::CreateVdecChannelDesc() { + vdecChannelDesc_ = aclvdecCreateChannelDesc(); + if (vdecChannelDesc_ == nullptr) { + ATLAS_LOG_ERROR("Create vdec channel desc failed"); + return ATLAS_ERROR_CREATE_DVPP_CHANNEL_DESC; + } + + // channelId: 0-15 + aclError ret = aclvdecSetChannelDescChannelId(vdecChannelDesc_, + channelId_); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Set vdec channel id to %d failed, errorno:%d", + channelId_, ret); + return ATLAS_ERROR_SET_VDEC_CHANNEL_ID; + } + + ret = aclvdecSetChannelDescThreadId(vdecChannelDesc_, subscribeThreadId_); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Set vdec channel thread id failed, errorno:%d", ret); + return ATLAS_ERROR_SET_VDEC_CHANNEL_THREAD_ID; + } + + // callback func + ret = aclvdecSetChannelDescCallback(vdecChannelDesc_, callback_); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Set vdec channel callback failed, errorno:%d", ret); + return ATLAS_ERROR_SET_VDEC_CALLBACK; + } + + ret = aclvdecSetChannelDescEnType(vdecChannelDesc_, + static_cast(enType_)); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Set vdec channel 
entype failed, errorno:%d", ret); + return ATLAS_ERROR_SET_VDEC_ENTYPE; + } + + ret = aclvdecSetChannelDescOutPicFormat(vdecChannelDesc_, + static_cast(format_)); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Set vdec channel pic format failed, errorno:%d", ret); + return ATLAS_ERROR_SET_VDEC_PIC_FORMAT; + } + + // create vdec channel + ATLAS_LOG_INFO("Start create vdec channel by desc..."); + ret = aclvdecCreateChannel(vdecChannelDesc_); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("fail to create vdec channel"); + return ATLAS_ERROR_CREATE_VDEC_CHANNEL; + } + ATLAS_LOG_INFO("Create vdec channel ok"); + + return ATLAS_OK; +} + +AtlasError VdecProcess::CreateInputStreamDesc(shared_ptr frameData) +{ + inputStreamDesc_ = acldvppCreateStreamDesc(); + if (inputStreamDesc_ == nullptr) { + ATLAS_LOG_ERROR("Create input stream desc failed"); + return ATLAS_ERROR_CREATE_STREAM_DESC; + } + + aclError ret; + //如果是最后一帧,则给dvpp vdec送一个结束帧 + if (frameData->isFinished) { + ret = acldvppSetStreamDescEos(inputStreamDesc_, 1); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Set EOS to input stream desc failed, errorno:%d", ret); + return ATLAS_ERROR_SET_STREAM_DESC_EOS; + } + return ATLAS_OK; + } + + ret = acldvppSetStreamDescData(inputStreamDesc_, frameData->data); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Set input stream data failed, errorno:%d", ret); + return ATLAS_ERROR_SET_STREAM_DESC_DATA; + } + + // set size for dvpp stream desc + ret = acldvppSetStreamDescSize(inputStreamDesc_, frameData->size); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Set input stream size failed, errorno:%d", ret); + return ATLAS_ERROR_SET_STREAM_DESC_SIZE; + } + + acldvppSetStreamDescTimestamp(inputStreamDesc_, frameData->frameId); + + return ATLAS_OK; +} + +AtlasError VdecProcess::CreateOutputPicDesc(size_t size) +{ + // Malloc output device memory + aclError ret = acldvppMalloc(&outputPicBuf_, size); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Malloc vdec output buffer 
failed when create " + "vdec output desc, errorno:%d", ret); + return ATLAS_ERROR_MALLOC_DVPP; + } + + outputPicDesc_ = acldvppCreatePicDesc(); + if (outputPicDesc_ == nullptr) { + ATLAS_LOG_ERROR("Create vdec output pic desc failed"); + return ATLAS_ERROR_CREATE_PIC_DESC; + } + + ret = acldvppSetPicDescData(outputPicDesc_, outputPicBuf_); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Set vdec output pic desc data failed, errorno:%d", ret); + return ATLAS_ERROR_SET_PIC_DESC_DATA; + } + + ret = acldvppSetPicDescSize(outputPicDesc_, size); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Set vdec output pic size failed, errorno:%d", ret); + return ATLAS_ERROR_SET_PIC_DESC_SIZE; + } + + ret = acldvppSetPicDescFormat(outputPicDesc_, + static_cast(format_)); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Set vdec output pic format failed, errorno:%d", ret); + return ATLAS_ERROR_SET_PIC_DESC_FORMAT; + } + + return ATLAS_OK; +} + +AtlasError VdecProcess::Process(shared_ptr frameData, void* userData) +{ + //创建输入desc + AtlasError atlRet = CreateInputStreamDesc(frameData); + if (atlRet != ATLAS_OK) { + ATLAS_LOG_ERROR("Create stream desc failed"); + return atlRet; + } + //创建输出desc + atlRet = CreateOutputPicDesc(outputPicSize_); + if (atlRet != ATLAS_OK) { + ATLAS_LOG_ERROR("Create pic desc failed"); + return atlRet; + } + //将数据送到dvpp vdec解码,解码后dvpp vdec会调用注册的回调函数 + //(VideoDecode::DvppVdecCallback)处理 + aclError ret = aclvdecSendFrame(vdecChannelDesc_, inputStreamDesc_, + outputPicDesc_, nullptr, userData); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Send frame to vdec failed, errorno:%d", ret); + return ATLAS_ERROR_VDEC_SEND_FRAME; + } + + return ATLAS_OK; +} + +AtlasError VdecProcess::SetFormat(uint32_t format) { + if ((format != PIXEL_FORMAT_YUV_SEMIPLANAR_420) || + (format != PIXEL_FORMAT_YVU_SEMIPLANAR_420)) { + ATLAS_LOG_ERROR("Set video decode output image format to %d failed, " + "only support %d(YUV420SP NV12) and %d(YUV420SP NV21)", + format, + 
(int)PIXEL_FORMAT_YUV_SEMIPLANAR_420, + (int)PIXEL_FORMAT_YVU_SEMIPLANAR_420); + return ATLAS_ERROR_VDEC_FORMAT_INVALID; + } + + format_ = format; + ATLAS_LOG_INFO("Set video decode output image format to %d ok", format); + + return ATLAS_OK; +} + +AtlasError VdecProcess::VideoParamCheck() { + if (((frameWidth_ == 0) || (frameWidth_ > kFrameWidthMax)) || + ((frameHeight_ == 0) || (frameHeight_ > kFrameHeightMax)) || + ((format_ != PIXEL_FORMAT_YUV_SEMIPLANAR_420) && + (format_ != PIXEL_FORMAT_YVU_SEMIPLANAR_420)) || + (enType_ > (uint32_t)H264_HIGH_LEVEL)) { + return ATLAS_ERROR_VDEC_INVALID_PARAM; + } + + return ATLAS_OK; +} + + diff --git a/cplusplus/common/atlasutil/src/vdec_process.h b/cplusplus/common/atlasutil/src/vdec_process.h new file mode 100644 index 0000000000000000000000000000000000000000..ba5abc8aeb3854246ff37fe5f256f04b64f7aab2 --- /dev/null +++ b/cplusplus/common/atlasutil/src/vdec_process.h @@ -0,0 +1,75 @@ +/** +* @file vdec_process.h +* +* Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. +* +* This program is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
+*/ +#ifndef _VDEC_PROCESS_H_ +#define _VDEC_PROCESS_H_ + +#include +#include +#include "acl/acl.h" +#include "acl/ops/acl_dvpp.h" + +class VdecProcess { +public: + VdecProcess(int channel, uint32_t width, uint32_t height, + int type, aclvdecCallback callback, + uint32_t outFormat = PIXEL_FORMAT_YUV_SEMIPLANAR_420); + ~VdecProcess(); + + static void* SubscribeReportThreadFunc(void *arg); + + AtlasError Init(); + void DestroyResource(); + AtlasError Process(std::shared_ptr frameData, void* userData); + AtlasError SetFormat(uint32_t format); + AtlasError VideoParamCheck(); + bool IsExit() { return isExit_; } + +private: + AtlasError CreateVdecChannelDesc(); + AtlasError CreateInputStreamDesc(std::shared_ptr frame); + AtlasError CreateOutputPicDesc(size_t size); + void UnsubscribReportThread(); + +private: + int channelId_; + + /* 1:YUV420 semi-planner(nv12) + 2:YVU420 semi-planner(nv21) + */ + uint32_t format_; + + /* 0:H265 main level + * 1:H264 baseline level + * 2:H264 main level + * 3:H264 high level + */ + uint32_t enType_; + + uint32_t frameWidth_; + uint32_t frameHeight_; + uint32_t alignWidth_; + uint32_t alignHeight_; + uint32_t outputPicSize_; + void *outputPicBuf_; + aclvdecCallback callback_; + + aclrtStream stream_; + + aclvdecChannelDesc *vdecChannelDesc_; + acldvppStreamDesc *inputStreamDesc_; + acldvppPicDesc *outputPicDesc_; + + pthread_t subscribeThreadId_; + bool isExit_; + bool isReleased_; + +}; + +#endif diff --git a/cplusplus/common/atlasutil/src/venc_process.cpp b/cplusplus/common/atlasutil/src/venc_process.cpp new file mode 100644 index 0000000000000000000000000000000000000000..fb6066cab93985515bdb5f640799dbb5be3387e9 --- /dev/null +++ b/cplusplus/common/atlasutil/src/venc_process.cpp @@ -0,0 +1,409 @@ +/** +* @file venc_process.cpp +* +* Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. 
+* +* This program is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +*/ +#include +#include +#include +#include + +#include "venc_process.h" + +using namespace std; +namespace { + uint32_t kVencQueueSize = 256; + uint32_t kImageEnQueueRetryTimes = 3; + uint32_t kEnqueueWait = 10000; + uint32_t kOutqueueWait = 10000; + uint32_t kAsyncWait = 10000; + bool g_RunFlag = true; +} + +VencProcess::VencProcess(VencConfig& vencInfo): +vencInfo_(vencInfo), +status_(STATUS_VENC_INIT), +vencProc_(nullptr), +frameImageQueue_(kVencQueueSize){ +} + +AtlasError VencProcess::Init() { + if (status_ != STATUS_VENC_INIT) { + return ATLAS_ERROR; + } + + if (vencInfo_.context == nullptr) { + aclError ret = aclrtGetCurrentContext(&(vencInfo_.context)); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Get current context failed"); + return ATLAS_ERROR_GET_ACL_CONTEXT; + } + } + + thread asyncVencTh = thread(VencProcess::AsyncVencThreadEntry, (void*)this); + asyncVencTh.detach(); + + while(status_ == STATUS_VENC_INIT) { + usleep(kAsyncWait); + } + + return (status_ == STATUS_VENC_WORK)? 
ATLAS_OK : ATLAS_ERROR_VENC_STATUS; +} + +void VencProcess::AsyncVencThreadEntry(void* arg) { + VencProcess* thisPtr = (VencProcess*)arg; + DvppVenc venc(thisPtr->vencInfo_); + + AtlasError ret = venc.Init(); + if (ret != ATLAS_OK) { + thisPtr->SetStatus(STATUS_VENC_ERROR); + ATLAS_LOG_ERROR("Dvpp venc init acl resource failed, error %d", ret); + return; + } + + thisPtr->SetStatus(STATUS_VENC_WORK); + while(thisPtr->GetStatus() == STATUS_VENC_WORK) { + shared_ptr image = thisPtr->GetEncodeImage(); + if (image == nullptr) { + usleep(kOutqueueWait); + continue; + } + + ret = venc.Process(*image.get()); + if (ret != ATLAS_OK) { + thisPtr->SetStatus(STATUS_VENC_ERROR); + ATLAS_LOG_ERROR("Dvpp venc image failed, error %d", ret); + break; + } + } + + venc.Finish(); + thisPtr->SetStatus(STATUS_VENC_EXIT); +} + +AtlasError VencProcess::Process(ImageData& image) { + if (status_ != STATUS_VENC_WORK) { + ATLAS_LOG_ERROR("The venc(status %d) is not working", status_); + return ATLAS_ERROR_VENC_STATUS; + } + + shared_ptr imagePtr = make_shared(); + imagePtr->format = image.format; + imagePtr->width = image.width; + imagePtr->height = image.height; + imagePtr->alignWidth = image.alignWidth; + imagePtr->alignHeight = image.alignHeight; + imagePtr->size = image.size; + imagePtr->data = image.data; + + for (uint32_t count = 0; count < kImageEnQueueRetryTimes; count++) { + if (frameImageQueue_.Push(imagePtr)) { + return ATLAS_OK; + } + usleep(kEnqueueWait); + } + ATLAS_LOG_ERROR("Venc(%s) lost image for queue full", vencInfo_.outFile.c_str()); + + return ATLAS_ERROR_VENC_QUEUE_FULL; +} + +shared_ptr VencProcess::GetEncodeImage() { + shared_ptr image = frameImageQueue_.Pop(); + return image; +} + +DvppVenc::DvppVenc(VencConfig& vencInfo): +vencInfo_(vencInfo), threadId_(0), +vencChannelDesc_(nullptr), vencFrameConfig_(nullptr), +inputPicDesc_(nullptr), vencStream_(nullptr), +outFp_(nullptr), isFinished_(false){ +} + +DvppVenc::~DvppVenc(){ + DestroyResource(); +} + +void 
DvppVenc::Callback(acldvppPicDesc *input, + acldvppStreamDesc *output, void *userData) { + void* data = acldvppGetStreamDescData(output); + uint32_t retCode = acldvppGetStreamDescRetCode(output); + if (retCode == 0) { + //encode success, then process output pic + uint32_t size = acldvppGetStreamDescSize(output); + + DvppVenc* venc = (DvppVenc*)userData; + AtlasError ret = venc->SaveVencFile(data, size); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("Save venc file failed, error %d", ret); + } else { + ATLAS_LOG_INFO("success to callback, stream size:%u", size); + } + } else { + ATLAS_LOG_ERROR("venc encode frame failed, ret = %u.", retCode); + } + + acldvppDestroyPicDesc(input); +} + +AtlasError DvppVenc::SaveVencFile(void* vencData, uint32_t size) { + AtlasError atlRet = ATLAS_OK; + + void* data = vencData; + if (vencInfo_.runMode == ACL_HOST) { + data = CopyDataToHost(vencData, size, vencInfo_.runMode, MEMORY_NORMAL); + } + + size_t ret = fwrite(data, 1, size, outFp_); + if (ret != size) { + ATLAS_LOG_ERROR("Save venc file %s failed, need write %u bytes, " + "but only write %zu bytes, error: %s", + vencInfo_.outFile.c_str(), size, ret, strerror(errno)); + atlRet = ATLAS_ERROR_WRITE_FILE; + } else { + fflush(outFp_); + } + + if (vencInfo_.runMode == ACL_HOST) { + delete[]((uint8_t *)data); + } + + return atlRet; +} + +AtlasError DvppVenc::Init() { + // create process callback thread + int ret = pthread_create(&threadId_, nullptr, + &DvppVenc::SubscribleThreadFunc, nullptr); + if (ret != 0) { + ATLAS_LOG_ERROR("Create venc subscrible thread failed, error %d", ret); + return ATLAS_ERROR_CREATE_THREAD; + } + + outFp_ = fopen(vencInfo_.outFile.c_str(), "wb+"); + if (outFp_ == nullptr) { + ATLAS_LOG_ERROR("Open file %s failed, error %s", + vencInfo_.outFile.c_str(), strerror(errno)); + return ATLAS_ERROR_OPEN_FILE; + } + + return InitResource(); +} + +void* DvppVenc::SubscribleThreadFunc(void *arg) +{ + // Notice: create context for this thread + aclrtContext context = 
nullptr; + aclError ret = aclrtCreateContext(&context, 0); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Create context failed, error %d.", ret); + return ((void*)(-1)); + } + + while (g_RunFlag) { + // Notice: timeout 1000ms + (void)aclrtProcessReport(1000); + } + + ret = aclrtDestroyContext(context); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("aclrtDestroyContext failed, ret=%d.", ret); + } + + return (void*)0; +} + +AtlasError DvppVenc::InitResource() +{ + aclError aclRet = aclrtSetCurrentContext(vencInfo_.context); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Set context for dvpp venc failed, error %d", aclRet); + return ATLAS_ERROR_SET_ACL_CONTEXT; + } + + AtlasError ret = CreateVencChannel(); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("Create venc channel failed, error %d", ret); + return ret; + } + + aclRet = aclrtCreateStream(&vencStream_); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Create venc stream failed, error %d", aclRet); + return ATLAS_ERROR_CREATE_STREAM; + } + + aclRet = aclrtSubscribeReport(threadId_, vencStream_); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Venc subscrible report failed, error %d", aclRet); + return ATLAS_ERROR_SUBSCRIBE_REPORT; + } + + ret = CreateFrameConfig(); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("Create venc frame config failed, error %d", ret); + return ret; + } + + ATLAS_LOG_INFO("venc init resource success"); + return ATLAS_OK; +} + +AtlasError DvppVenc::CreateVencChannel() { + // create vdec channelDesc + vencChannelDesc_ = aclvencCreateChannelDesc(); + if (vencChannelDesc_ == nullptr) { + ATLAS_LOG_ERROR("Create venc channel desc failed"); + return ATLAS_ERROR_CREATE_VENC_CHAN_DESC; + } + + aclvencSetChannelDescThreadId(vencChannelDesc_, threadId_); + aclvencSetChannelDescCallback(vencChannelDesc_, &DvppVenc::Callback); + aclvencSetChannelDescEnType(vencChannelDesc_, vencInfo_.enType); + aclvencSetChannelDescPicFormat(vencChannelDesc_, vencInfo_.format); + 
aclvencSetChannelDescPicWidth(vencChannelDesc_, vencInfo_.maxWidth); + aclvencSetChannelDescPicHeight(vencChannelDesc_, vencInfo_.maxHeight); + aclvencSetChannelDescKeyFrameInterval(vencChannelDesc_, 16); + + aclError ret = aclvencCreateChannel(vencChannelDesc_); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("fail to create venc channel"); + return ATLAS_ERROR_CREATE_VENC_CHAN; + } + return ATLAS_OK; +} + +AtlasError DvppVenc::CreateFrameConfig() { + vencFrameConfig_ = aclvencCreateFrameConfig(); + if (vencFrameConfig_ == nullptr) { + ATLAS_LOG_ERROR("Create frame config failed"); + return ATLAS_ERROR_VENC_CREATE_FRAME_CONFIG; + } + + AtlasError ret = SetFrameConfig(0, 1); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("Set frame config failed, error %d", ret); + return ret; + } + + return ATLAS_OK; +} + +AtlasError DvppVenc::SetFrameConfig(uint8_t eos, uint8_t forceIFrame) +{ + // set eos + aclError ret = aclvencSetFrameConfigEos(vencFrameConfig_, eos); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("fail to set eos, ret = %d", ret); + return ATLAS_ERROR_VENC_SET_EOS; + } + + ret = aclvencSetFrameConfigForceIFrame(vencFrameConfig_, forceIFrame); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("fail to set venc ForceIFrame"); + return ATLAS_ERROR_VENC_SET_IF_FRAME; + } + + return ATLAS_OK; +} + +AtlasError DvppVenc::Process(ImageData& image) +{ + // create picture desc + AtlasError ret = CreateInputPicDesc(image); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("fail to create picture description"); + return ret; + } + + // send frame + acldvppStreamDesc *outputStreamDesc = nullptr; + + ret = aclvencSendFrame(vencChannelDesc_, inputPicDesc_, + static_cast(outputStreamDesc), vencFrameConfig_, (void *)this); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("send venc frame failed, error %d", ret); + return ATLAS_ERROR_VENC_SEND_FRAME; + } + + return ATLAS_OK; +} + +AtlasError DvppVenc::CreateInputPicDesc(ImageData& image) +{ + inputPicDesc_ = acldvppCreatePicDesc(); + if 
(inputPicDesc_ == nullptr) { + ATLAS_LOG_ERROR("Create input pic desc failed"); + return ATLAS_ERROR_CREATE_PIC_DESC; + } + + acldvppSetPicDescFormat(inputPicDesc_, vencInfo_.format); + acldvppSetPicDescWidth(inputPicDesc_, image.width); + acldvppSetPicDescHeight(inputPicDesc_, image.height); + acldvppSetPicDescWidthStride(inputPicDesc_, ALIGN_UP16(image.width)); + acldvppSetPicDescHeightStride(inputPicDesc_, ALIGN_UP2(image.height)); + acldvppSetPicDescData(inputPicDesc_, image.data.get()); + acldvppSetPicDescSize(inputPicDesc_, image.size); + + return ATLAS_OK; +} + +void DvppVenc::Finish() { + if (isFinished_) { + return; + } + + // set frame config, eos frame + AtlasError ret = SetFrameConfig(1, 0); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("Set eos frame config failed, error %d", ret); + return; + } + + // send eos frame + ret = aclvencSendFrame(vencChannelDesc_, nullptr, + nullptr, vencFrameConfig_, nullptr); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("fail to send eos frame, ret=%u", ret); + return; + } + + fclose(outFp_); + outFp_ = nullptr; + isFinished_ = true; + ATLAS_LOG_INFO("venc process success"); + + return; +} + +void DvppVenc::DestroyResource() +{ + Finish(); + + if (vencChannelDesc_ != nullptr) { + (void)aclvencDestroyChannel(vencChannelDesc_); + (void)aclvencDestroyChannelDesc(vencChannelDesc_); + vencChannelDesc_ = nullptr; + } + + if (inputPicDesc_ != nullptr) { + (void)acldvppDestroyPicDesc(inputPicDesc_); + inputPicDesc_ = nullptr; + } + + if (vencStream_ != nullptr) { + aclError ret = aclrtDestroyStream(vencStream_); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Vdec destroy stream failed, error %d", ret); + } + vencStream_ = nullptr; + } + + if (vencFrameConfig_ != nullptr) { + (void)aclvencDestroyFrameConfig(vencFrameConfig_); + vencFrameConfig_ = nullptr; + } +} diff --git a/cplusplus/common/atlasutil/src/video_decode.cpp b/cplusplus/common/atlasutil/src/video_decode.cpp new file mode 100644 index 
0000000000000000000000000000000000000000..9ea4a505cfe3e5d6d5fa814285f45b911b3a9df1 --- /dev/null +++ b/cplusplus/common/atlasutil/src/video_decode.cpp @@ -0,0 +1,580 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "atlas_utils.h" +#include "video_decode.h" + +using namespace std; + +namespace { + const int64_t kUsec = 1000000; + const uint32_t kDecodeFrameQueueSize = 256; + const int kDecodeQueueOpWait = 10000; //每次等待10毫秒 + const int kFrameEnQueueRetryTimes = 1000;//为了防止丢帧,ffmpeg解码得到的h26x入队最多等待 10秒 + const int kQueueOpRetryTimes = 1000; + const int kOutputJamWait = 10000; + const int kInvalidTpye = -1; + const int kWaitDecodeFinishInterval = 1000; + + const int kDefaultFps = 1; + const int kReadSlow = 2; + + ChannelIdGenerator channelIdGenerator; +} + +VideoDecode::VideoDecode(const std::string& videoName, aclrtContext context) : +context_(context), streamType_(STREAM_VIDEO), channelId_(INVALID_CHANNEL_ID), +frameId_(0), finFrameCnt_(0), status_(DECODE_UNINIT), streamName_(videoName), +streamFormat_(H264_MAIN_LEVEL), lastDecodeTime_(0), fpsInterval_(0), +ffmpegDecoder_(nullptr), dvppVdec_(nullptr), +frameImageQueue_(kDecodeFrameQueueSize), isStop_(false), isReleased_(false), +isJam_(false) { + if (IsRtspAddr(videoName)) { + streamType_ = STREAM_RTSP; + } +} + +VideoDecode::~VideoDecode() { + DestroyResource(); +} + +void VideoDecode::DestroyResource() { + if (isReleased_) return; + //1. stop ffmpeg + isStop_ = true; + if (ffmpegDecoder_ != nullptr) { ffmpegDecoder_->StopDecode(); } + while ((status_ >= DECODE_START) && (status_ < DECODE_FFMPEG_FINISHED)) { + usleep(kWaitDecodeFinishInterval); + } + //2. delete ffmpeg decoder + delete ffmpegDecoder_; + ffmpegDecoder_ = nullptr; + //3. release dvpp vdec + delete dvppVdec_; + dvppVdec_ = nullptr; + //4. 
release image memory in decode output queue + do { + shared_ptr frame = FrameImageOutQueue(true); + if (frame == nullptr) { + break; + } + + if (frame->data != nullptr) { + acldvppFree(frame->data.get()); + frame->data = nullptr; + } + }while(1); + //5. release channel id + channelIdGenerator.ReleaseChannelId(channelId_); + + isReleased_ = true; +} + +AtlasError VideoDecode::InitResource() { + aclError aclRet; + //1. Set acl context of video decoder, use current thread context default + if (context_ == nullptr) { + aclRet = aclrtGetCurrentContext(&context_); + if ((aclRet != ACL_ERROR_NONE) || (context_ == nullptr)) { + ATLAS_LOG_ERROR("Get current acl context error:%d", aclRet); + return ATLAS_ERROR_GET_ACL_CONTEXT; + } + } + AtlasError ret = SetAclContext(); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("Set video decoder acl context error:%d", aclRet); + return ret; + } + //2.Get current run mode + aclRet = aclrtGetRunMode(&runMode_); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("acl get run mode failed"); + return ATLAS_ERROR_GET_RUM_MODE; + } + + return ATLAS_OK; +} + +AtlasError VideoDecode::InitVdecDecoder() { + //Generate a unique channel id for video decoder + channelId_ = channelIdGenerator.GenerateChannelId(); + if (channelId_ == INVALID_CHANNEL_ID) { + ATLAS_LOG_ERROR("Decoder number excessive %d", VIDEO_CHANNEL_MAX); + return ATLAS_ERROR_TOO_MANY_VIDEO_DECODERS; + } + + //Create dvpp vdec to decode h26x data + dvppVdec_ = new VdecProcess(channelId_, ffmpegDecoder_->GetFrameWidth(), + ffmpegDecoder_->GetFrameHeight(), + streamFormat_, VideoDecode::DvppVdecCallback); + AtlasError ret = dvppVdec_->Init(); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("Dvpp vdec init failed"); + } + + return ret; +} + +AtlasError VideoDecode::InitFFmpegDecoder() { + //Create ffmpeg decoder to parse video stream to h26x frame data + ffmpegDecoder_ = new FFmpegDecoder(streamName_); + if (kInvalidTpye == GetVdecType()) { + this->SetStatus(DECODE_ERROR); + delete 
ffmpegDecoder_; + ATLAS_LOG_ERROR("Video %s type is invalid", streamName_.c_str()); + return ATLAS_ERROR_FFMPEG_DECODER_INIT; + } + + //Get video fps, if no fps, use 1 as default + int fps = ffmpegDecoder_->GetFps(); + if (fps == 0) { + fps = kDefaultFps; + ATLAS_LOG_INFO("Video %s fps is 0, change to %d", + streamName_.c_str(), fps); + } + //Cal the frame interval time(us) + fpsInterval_ = kUsec / fps; + + return ATLAS_OK; +} + +AtlasError VideoDecode::Open() { + //Open video stream, if open failed before, return error directly + if (status_ == DECODE_ERROR) + return ATLAS_ERROR_OPEN_VIDEO_UNREADY; + //If open ok already + if (status_ != DECODE_UNINIT) + return ATLAS_OK; + //Init acl resource + AtlasError ret = InitResource(); + if (ret != ATLAS_OK) { + this->SetStatus(DECODE_ERROR); + ATLAS_LOG_ERROR("Open %s failed for init resource error: %d", + streamName_.c_str(), ret); + return ret; + } + //Init ffmpeg decoder + ret = InitFFmpegDecoder(); + if (ret != ATLAS_OK) { + this->SetStatus(DECODE_ERROR); + ATLAS_LOG_ERROR("Open %s failed for init ffmpeg error: %d", + streamName_.c_str(), ret); + return ret; + } + //Init dvpp vdec decoder + ret = InitVdecDecoder(); + if (ret != ATLAS_OK) { + this->SetStatus(DECODE_ERROR); + ATLAS_LOG_ERROR("Open %s failed for init vdec error: %d", + streamName_.c_str(), ret); + return ret; + } + //Set init ok + this->SetStatus(DECODE_READY); + ATLAS_LOG_INFO("Video %s decode init ok", streamName_.c_str()); + + return ATLAS_OK; +} + +int VideoDecode::GetVdecType() { + //VDEC only support H265 main level,264 baseline level,main level,high level + int type = ffmpegDecoder_->GetVideoType(); + int profile = ffmpegDecoder_->GetProfile(); + if (type == AV_CODEC_ID_HEVC) { + streamFormat_ = H265_MAIN_LEVEL; + } else if (type == AV_CODEC_ID_H264) { + switch(profile) { + case FF_PROFILE_H264_BASELINE: + streamFormat_ = H264_BASELINE_LEVEL; + break; + case FF_PROFILE_H264_MAIN: + streamFormat_ = H264_MAIN_LEVEL; + break; + case 
FF_PROFILE_H264_HIGH: + case FF_PROFILE_H264_HIGH_10: + case FF_PROFILE_H264_HIGH_10_INTRA: + case FF_PROFILE_H264_MULTIVIEW_HIGH: + case FF_PROFILE_H264_HIGH_422: + case FF_PROFILE_H264_HIGH_422_INTRA: + case FF_PROFILE_H264_STEREO_HIGH: + case FF_PROFILE_H264_HIGH_444: + case FF_PROFILE_H264_HIGH_444_PREDICTIVE: + case FF_PROFILE_H264_HIGH_444_INTRA: + streamFormat_ = H264_HIGH_LEVEL; + break; + default: + ATLAS_LOG_INFO("Not support h264 profile %d, use as mp", profile); + streamFormat_ = H264_MAIN_LEVEL; + break; + } + } else { + streamFormat_ = kInvalidTpye; + ATLAS_LOG_ERROR("Not support stream, type %d, profile %d", type, profile); + } + + return streamFormat_; +} + +//dvpp vdec callback +void VideoDecode::DvppVdecCallback(acldvppStreamDesc *input, + acldvppPicDesc *output, void *userData) +{ + VideoDecode* decoder = (VideoDecode*)userData; + //Get decoded image parameters + shared_ptr image = make_shared(); + image->format = acldvppGetPicDescFormat(output); + image->width = acldvppGetPicDescWidth(output); + image->height = acldvppGetPicDescHeight(output); + image->alignWidth = acldvppGetPicDescWidthStride(output); + image->alignHeight = acldvppGetPicDescHeightStride(output); + image->size = acldvppGetPicDescSize(output); + + void* vdecOutBufferDev = acldvppGetPicDescData(output); + image->data = SHARED_PRT_DVPP_BUF(vdecOutBufferDev); + + //Put the decoded image to queue for read + decoder->ProcessDecodedImage(image); + //Release resouce + aclError ret = acldvppDestroyPicDesc(output); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("fail to destroy pic desc, error %d", ret); + } + + if (input != nullptr) { + void* inputBuf = acldvppGetStreamDescData(input); + if (inputBuf != nullptr) { + acldvppFree(inputBuf); + } + + aclError ret = acldvppDestroyStreamDesc(input); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("fail to destroy input stream desc"); + } + } +} + +void VideoDecode::ProcessDecodedImage(shared_ptr frameData) { + finFrameCnt_++; + if 
(YUV420SP_SIZE(frameData->width, frameData->height) != frameData->size) { + ATLAS_LOG_ERROR("Invalid decoded frame parameter, " + "width %d, height %d, size %d, buffer %p", + frameData->width, frameData->height, + frameData->size, frameData->data.get()); + return; + } + + FrameImageEnQueue(frameData); + + if ((status_ == DECODE_FFMPEG_FINISHED) && (finFrameCnt_ >= frameId_)) { + ATLAS_LOG_INFO("Last frame decoded by dvpp, change status to %d", + DECODE_DVPP_FINISHED); + this->SetStatus(DECODE_DVPP_FINISHED); + } +} + +AtlasError VideoDecode::FrameImageEnQueue(shared_ptr frameData) { + for (int count = 0; count < kFrameEnQueueRetryTimes; count++) { + if (frameImageQueue_.Push(frameData)) + return ATLAS_OK; + usleep(kDecodeQueueOpWait); + } + ATLAS_LOG_ERROR("Video %s lost decoded image for queue full", + streamName_.c_str()); + + return ATLAS_ERROR_VDEC_QUEUE_FULL; +} + +//start decoder +void VideoDecode::StartFrameDecoder() { + if (status_ == DECODE_READY) { + + decodeThread_ = thread(FrameDecodeThreadFunction, (void*)this); + decodeThread_.detach(); + + status_ = DECODE_START; + } +} + +//ffmpeg decoder entry +void VideoDecode::FrameDecodeThreadFunction(void* decoderSelf) { + VideoDecode* thisPtr = (VideoDecode*)decoderSelf; + + aclError aclRet = thisPtr->SetAclContext(); + if (aclRet != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Set frame decoder context failed, errorno:%d", + aclRet); + return; + } + //start decode until complete + thisPtr->FFmpegDecode(); + if (thisPtr->IsStop()) { + thisPtr->SetStatus(DECODE_FINISHED); + return; + } + thisPtr->SetStatus(DECODE_FFMPEG_FINISHED); + //when ffmpeg decode finish, send eos to vdec + shared_ptr videoFrame = make_shared(); + videoFrame->isFinished = true; + videoFrame->data = nullptr; + videoFrame->size = 0; + thisPtr->dvppVdec_->Process(videoFrame, decoderSelf); + + while((thisPtr->GetStatus() != DECODE_DVPP_FINISHED) && !thisPtr->IsStop()) { + usleep(kWaitDecodeFinishInterval); + } +} + +//callback of ffmpeg decode frame 
+AtlasError VideoDecode::FrameDecodeCallback(void* decoder, void* frameData, + int frameSize) { + if ((frameData == NULL) || (frameSize == 0)) { + ATLAS_LOG_ERROR("Frame data is null"); + return ATLAS_ERROR_H26X_FRAME; + } + + //将ffmpeg解码得到的h26x数据拷贝到dvpp内存 + VideoDecode* videoDecoder = (VideoDecode*)decoder; + + void* buffer = CopyDataToDevice(frameData, frameSize, + videoDecoder->runMode_, MEMORY_DVPP); + if (buffer == nullptr) { + ATLAS_LOG_ERROR("Copy frame h26x data to dvpp failed"); + return ATLAS_ERROR_COPY_DATA; + } + + shared_ptr videoFrame = make_shared(); + videoDecoder->frameId_++; + videoFrame->frameId = videoDecoder->frameId_; + videoFrame->data = buffer; + videoFrame->size = frameSize; + //使用dvpp vdec解码h26x帧数据 + AtlasError ret = videoDecoder->dvppVdec_->Process(videoFrame, decoder); + if (ret != ATLAS_OK) { + ATLAS_LOG_ERROR("Dvpp vdec process %dth frame failed, error:%d", + videoDecoder->frameId_, ret); + return ret; + } + + //根据视频帧率等待下一帧 + videoDecoder->SleeptoNextFrameTime(); + return ATLAS_OK; +} + +void VideoDecode::SleeptoNextFrameTime() { + while(frameImageQueue_.Size() > kReadSlow) { + if (isStop_) { + return; + } + usleep(kOutputJamWait); + } + + if (streamType_ == STREAM_RTSP) { + usleep(0); + return; + } + + //获取当前时间 + timeval tv; + gettimeofday(&tv, 0); + int64_t now = (int64_t)tv.tv_sec * 1000000 + (int64_t)tv.tv_usec; + + if (lastDecodeTime_ == 0) { + lastDecodeTime_ = now; + return; + } + //计算到解码一帧后还剩余的时间 + int64_t lastInterval = (now - lastDecodeTime_); + int64_t sleepTime = (lastInterval < fpsInterval_)?(fpsInterval_-lastInterval):0; + //耗完一帧解码时间 + usleep(sleepTime); + //记录下一帧解码的开始时间 + gettimeofday(&tv, 0); + lastDecodeTime_ = (int64_t)tv.tv_sec * 1000000 + (int64_t)tv.tv_usec; + + return; +} + +//当前解码器是否准备好 +bool VideoDecode::IsOpened() { + ATLAS_LOG_INFO("Video %s decode status %d", streamName_.c_str(), status_); + return (status_ == DECODE_READY) || (status_ == DECODE_START); +} + +/*读取一帧解码后的yuv图像*/ +AtlasError 
VideoDecode::Read(ImageData& image) { + //如果当前解码器异常或者解码结束,则直接返回nullptr + if (status_ == DECODE_ERROR) { + ATLAS_LOG_ERROR("Read failed for decode %s failed", + streamName_.c_str()); + return ATLAS_ERROR_VIDEO_DECODER_STATUS; + } + + if (status_ == DECODE_FINISHED) { + ATLAS_LOG_INFO("No frame to read for decode %s finished", + streamName_.c_str()); + return ATLAS_ERROR_DECODE_FINISH; + } + //如果当前只是准备好,但是还未开始解码.Read的调用触发解码开始 + if (status_ == DECODE_READY) { + StartFrameDecoder(); + usleep(kDecodeQueueOpWait); + } + //从解码后图片存放队列读取一帧图片 + bool noWait = (status_ == DECODE_DVPP_FINISHED); + shared_ptr frame = FrameImageOutQueue(noWait); + if (noWait && (frame == nullptr)) { + SetStatus(DECODE_FINISHED); + ATLAS_LOG_INFO("No frame to read anymore"); + return ATLAS_ERROR_DECODE_FINISH; + } + + if (frame == nullptr) { + ATLAS_LOG_ERROR("No frame image to read abnormally"); + return ATLAS_ERROR_READ_EMPTY; + } + + image.format = frame->format; + image.width = frame->width; + image.height = frame->height; + image.alignWidth = frame->alignWidth; + image.alignHeight = frame->alignHeight; + image.size = frame->size; + image.data = frame->data; + + return ATLAS_OK; +} + +shared_ptr VideoDecode::FrameImageOutQueue(bool noWait) { + shared_ptr image = frameImageQueue_.Pop(); + + if (noWait || (image != nullptr)) return image; + + for (int count = 0; count < kQueueOpRetryTimes - 1; count++) { + usleep(kDecodeQueueOpWait); + + image = frameImageQueue_.Pop(); + if (image != nullptr) + return image; + } + + return nullptr; +} + +AtlasError VideoDecode::Set(StreamProperty key, int value) { + AtlasError ret = ATLAS_OK; + switch(key) { + case OUTPUT_IMAGE_FORMAT: + ret = dvppVdec_->SetFormat(value); + break; + case RTSP_TRANSPORT: + ret = SetRtspTransType(value); + break; + default: + ret = ATLAS_ERROR_UNSURPPORT_PROPERTY; + ATLAS_LOG_ERROR("Unsurpport property %d to set for video %s", + (int)key, streamName_.c_str()); + break; + } + + return ret; +} + +AtlasError 
VideoDecode::SetRtspTransType(uint32_t transCode) { + AtlasError ret = ATLAS_OK; + + if (transCode == RTSP_TRANS_UDP) + ffmpegDecoder_->SetTransport(RTSP_TRANSPORT_UDP); + else if (transCode == RTSP_TRANS_TCP) + ffmpegDecoder_->SetTransport(RTSP_TRANSPORT_TCP); + else { + ret = ATLAS_ERROR_INVALID_PROPERTY_VALUE; + ATLAS_LOG_ERROR("Unsurport rtsp transport property value %d", + transCode); + } + + return ret; +} + +uint32_t VideoDecode::Get(StreamProperty key) { + uint32_t value = 0; + + switch(key){ + case FRAME_WIDTH: + value = ffmpegDecoder_->GetFrameWidth(); + break; + case FRAME_HEIGHT: + value = ffmpegDecoder_->GetFrameHeight(); + break; + case VIDEO_FPS: + value = ffmpegDecoder_->GetFps(); + break; + default: + ATLAS_LOG_ERROR("Unsurpport property %d to get for video", key); + break; + } + + return value; +} + +AtlasError VideoDecode::SetAclContext() { + if (context_ == nullptr) { + ATLAS_LOG_ERROR("Video decoder context is null"); + return ATLAS_ERROR_SET_ACL_CONTEXT; + } + + aclError ret = aclrtSetCurrentContext(context_); + if (ret != ACL_ERROR_NONE) { + ATLAS_LOG_ERROR("Video decoder set context failed, error: %d", ret); + return ATLAS_ERROR_SET_ACL_CONTEXT; + } + + return ATLAS_OK; +} + +AtlasError VideoDecode::Close() { + DestroyResource(); + return ATLAS_OK; +} + diff --git a/cplusplus/common/atlasutil/src/video_decode.h b/cplusplus/common/atlasutil/src/video_decode.h new file mode 100644 index 0000000000000000000000000000000000000000..5d0cbdeacca892bfa6fe04e7e83031fa00dc39b8 --- /dev/null +++ b/cplusplus/common/atlasutil/src/video_decode.h @@ -0,0 +1,184 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#ifndef VIDEO_DECODE_H_ +#define VIDEO_DECODE_H_ + +#include +#include +#include + +#include +#include +#include +#include +#include + +#include "thread_safe_queue.h" +#include "ffmpeg_decoder.h" +#include "vdec_process.h" +#include "atlas_videocapture.h" + +#define INVALID_STREAM_FORMAT -1 + +#define RTSP_TRANSPORT_UDP "udp" +#define RTSP_TRANSPORT_TCP "tcp" + +#define VIDEO_CHANNEL_MAX 23 +#define INVALID_CHANNEL_ID -1 + +enum StreamType { + STREAM_VIDEO = 0, + STREAM_RTSP, +}; + +enum DecodeStatus { + DECODE_ERROR = -1, + DECODE_UNINIT = 0, + DECODE_READY = 1, + DECODE_START = 2, + DECODE_FFMPEG_FINISHED = 3, + DECODE_DVPP_FINISHED = 4, + DECODE_FINISHED = 5 +}; + +class ChannelIdGenerator { +public: + ChannelIdGenerator() { + for (int i = 0; i < VIDEO_CHANNEL_MAX; i++) { + channelId_[i] = INVALID_CHANNEL_ID; + } + } + ~ChannelIdGenerator(){}; + + int GenerateChannelId(void) { + std::lock_guard lock(mutex_lock_); + for (int i = 0; i < VIDEO_CHANNEL_MAX; i++) { + if (channelId_[i] == INVALID_CHANNEL_ID) { + channelId_[i] = i; + return i; + } + } + + return INVALID_CHANNEL_ID; + } + + void ReleaseChannelId(int channelId) { + std::lock_guard lock(mutex_lock_); + if ((channelId >= 0) && (channelId < VIDEO_CHANNEL_MAX)) { + channelId_[channelId] = INVALID_CHANNEL_ID; + } + } + +private: + int channelId_[VIDEO_CHANNEL_MAX]; + mutable std::mutex mutex_lock_; +}; + + +class VideoDecode : public AtlasVideoCapBase { + public: + /** + * @brief VideoDecode constructor + */ + VideoDecode(const std::string& videoName, aclrtContext context = nullptr); + + /** + * @brief VideoDecode destructor + */ + ~VideoDecode(); + + static void FrameDecodeThreadFunction(void* decoderSelf); + static AtlasError FrameDecodeCallback(void* context, void* frameData, + int frameSize); + static void DvppVdecCallback(acldvppStreamDesc *input, + acldvppPicDesc *output, void *userdata); + + AtlasError 
DecodeH26xFrame(); + void ProcessDecodedImage(std::shared_ptr frameData); + AtlasError Read(ImageData& image); + + void FFmpegDecode() { ffmpegDecoder_->Decode(&VideoDecode::FrameDecodeCallback, (void*) this); } + + bool IsOpened(); + AtlasError Open(); + + void SetStatus(DecodeStatus status) { status_ = status; } + DecodeStatus GetStatus() { return status_; } + + AtlasError Set(StreamProperty key, int value); + uint32_t Get(StreamProperty key); + + void SleeptoNextFrameTime(); + AtlasError SetAclContext(); + AtlasError Close(); + + void DestroyResource(); + bool IsStop() { return isStop_; } + bool IsJam() { return isJam_; } + +private: + AtlasError InitResource(); + AtlasError InitVdecDecoder(); + AtlasError InitFFmpegDecoder(); + void StartFrameDecoder(); + int GetVdecType(); + AtlasError FrameImageEnQueue(std::shared_ptr frameData); + std::shared_ptr FrameImageOutQueue(bool noWait = false); + AtlasError SetRtspTransType(uint32_t transCode); + +private: + aclrtContext context_; + aclrtRunMode runMode_; + StreamType streamType_; + + int channelId_; + uint32_t frameId_; + uint32_t finFrameCnt_; + DecodeStatus status_; + std::string streamName_; + int streamFormat_; + int64_t lastDecodeTime_; + int64_t fpsInterval_; + std::thread decodeThread_; + FFmpegDecoder* ffmpegDecoder_; + VdecProcess* dvppVdec_; + + ThreadSafeQueue> frameImageQueue_; + + bool isStop_; + bool isReleased_; + bool isJam_; +}; + +#endif /* VIDEO_DECODE_H_ */ diff --git a/cplusplus/common/presenteragent/Makefile b/cplusplus/common/presenteragent/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..5c19aabcb1da4221a56869e94a35c9ded460c3f1 --- /dev/null +++ b/cplusplus/common/presenteragent/Makefile @@ -0,0 +1,93 @@ +ifndef DDK_PATH +$(error "Can not find DDK_PATH env, please set it in environment!.") +endif + +ifeq ($(mode),) +mode=AtlasDK +endif + +ifeq ($(mode), AtlasDK) +CC := aarch64-linux-gnu-g++ +ARCH := arm +else ifeq ($(mode), ASIC) +CC := g++ +ARCH := x86 +else 
+$(error "Unsupported mode: "$(mode)", please input: AtlasDK or ASIC.") +endif + + +LOCAL_MODULE_NAME := libpresenteragent.so + +LOCAL_DIR := . +OUT_DIR = out +OBJ_DIR = $(OUT_DIR)/obj +DEPS_DIR = $(OUT_DIR)/deps +LOCAL_LIBRARY=$(OUT_DIR)/$(LOCAL_MODULE_NAME) +OUT_INC_DIR = $(OUT_DIR)/include + +INC_DIR := \ + -I$(LOCAL_DIR) \ + -I$(LOCAL_DIR)/include \ + -I$(DDK_PATH)/acllib/include \ + -I$(HOME)/ascend_ddk/$(ARCH)/include \ + -I$(DDK_PATH)/atc/include/protobuf \ + -I/usr/local/include \ + -I$(LOCAL_DIR)/src \ + +SRCS := $(patsubst $(LOCAL_DIR)/%.cpp, %.cpp, $(shell find $(LOCAL_DIR)/src -name *.cpp)) +OBJS := $(addprefix $(OBJ_DIR)/, $(patsubst %.cpp, %.o,$(SRCS))) + +PROTO_SRCS = $(patsubst $(LOCAL_DIR)/%.cc, %.cc, $(shell find $(LOCAL_DIR)/proto -name *.pb.cc)) +PROTO_OBJS := $(addprefix $(OBJ_DIR)/, $(patsubst %.cc, %.o,$(PROTO_SRCS))) + +ALL_OBJS := $(OBJS) \ + $(PROTO_OBJS) \ + +CC_FLAGS := $(INC_DIR) -std=c++11 -Wall -fPIC -O2 + +LNK_FLAGS := \ + -Wl,-rpath-link=$(DDK_PATH)/atc/lib64 \ + -L$(DDK_PATH)/atc/lib64 \ + -L$(DDK_PATH)/acllib/lib64 \ + -L$(HOME)/ascend_ddk/$(ARCH)/lib \ + -lprotobuf \ + -lascendcl \ + -shared + +all: do_pre_build do_build + +do_pre_build: + $(Q)echo - do [$@] + $(Q)mkdir -p $(OBJ_DIR) + $(Q)mkdir -p $(OUT_INC_DIR) + +do_build: $(LOCAL_LIBRARY) | do_pre_build + $(Q)echo - do [$@] + +$(LOCAL_LIBRARY): $(ALL_OBJS) + $(Q)echo [LD] $@ + $(Q)$(CC) $(CC_FLAGS) -o $@ $^ -Wl,--whole-archive -Wl,--no-whole-archive -Wl,--start-group -Wl,--end-group $(LNK_FLAGS) + $(Q)cp -R $(LOCAL_DIR)/include/* $(OUT_INC_DIR) + +$(OBJS): $(OBJ_DIR)/%.o : %.cpp | do_pre_build + $(Q)echo [CC] $@ + $(Q)mkdir -p $(dir $@) + $(Q)$(CC) $(CC_FLAGS) $(INC_DIR) -c -fstack-protector-all $< -o $@ + + +$(PROTO_OBJS) : proto/presenter_message.pb.cc | do_pre_build + $(Q)echo [CC] $@ + $(Q)mkdir -p $(dir $@) + $(Q)$(CC) $(CC_FLAGS) $(INC_DIR) -c -fstack-protector-all $< -o $@ + +install: all + $(Q)echo [INSTALL] $@ + $(Q)mkdir -p $(HOME)/ascend_ddk/$(ARCH)/include + 
$(Q)mkdir -p $(HOME)/ascend_ddk/$(ARCH)/lib + $(Q)cp -R $(OUT_INC_DIR)/ascenddk* $(HOME)/ascend_ddk/$(ARCH)/include/ + $(Q)cp -R $(OUT_DIR)/lib*.so $(HOME)/ascend_ddk/$(ARCH)/lib/ + +install pcie: + +clean: diff --git a/cplusplus/common/presenteragent/README_CN.md b/cplusplus/common/presenteragent/README_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..735db6d2cdce9c142701284cf76462df2813a030 --- /dev/null +++ b/cplusplus/common/presenteragent/README_CN.md @@ -0,0 +1,239 @@ +中文|[英文](README.md) + +Presenter部署在Mind Studio所在的Linux服务器上,主要作用是推理结果的展示。 + +Presenter包括Presenter Server与Presenter Agent。 + +- Presenter Agent提供一系列API,用户通过调用API向Presenter Server推送媒体消息。 +- Presenter Server接收Presenter Agent发过来的数据,通过浏览器进行结果展示。 + +## Presenter Server + +- **Description** + + Presenter Server是展示推理结果的软件包,该软件基于python3实现,并用到了第三方web框架tornado,以及底层通信框架protobuf。 + + Presenter Server支持图片模式和视频模式。图片模式展示单张图片,视频模式以图片流的方式展示连续图片。Presenter server通过channel来标记不同的数据源,在浏览器里通过Create按钮添加channel,Delete进行删除,默认支持两路channel,分别是image和video。 + +- **Sample Code** + + Presenter Server源码在样例的工程目录下,config是配置文件目录,修改config.conf进行可服务端ip和port的定制,logging.conf是logging模块的配置;src是源码目录,其中,presenter\_message\_pb2.py 定义protobuf格式,presenter\_socket\_server.py 负责并行接收数据,webapp.py 负责把数据推送到Chrome,进行前台展示;ui是web界面素材所在目录。 + + 与Presenter Agent的消息通信: + + Presenter Server 与Presenter Agent和Chrome的消息通信如下图所示,Chrome上发起创建channel的操作,Presenter Server发送数据到指定channel,Chrome打开此channel,观察推理结果。 + + ![输入图片说明](https://images.gitee.com/uploads/images/2021/0112/114222_18033d3b_7401379.png "屏幕截图.png") + + Presenter Server与Presenter Agent之间消息结构如下,依次是4个字节的消息总长度,1个字节的消息名长度,若干字节的消息消息名,若干字节的protobuf内容。 + + ``` + -------------------------------------------------------------- + |total message len | int | 4 bytes | + |-------------------------------------------------------------- + |message name len | byte | 1 byte | + |-------------------------------------------------------------- + |message name | string | xx bytes | + 
|-------------------------------------------------------------- + |message body | protobuf | xx bytes | + --------------------------------------------------------------- + ``` + + 主要的消息有两个,一个是打开channel的请求消息OpenChannelRequest ,另一个是发送数据的消息PresentImageRequest ,其在ptorobuf中的定义如下: + + ``` + message OpenChannelRequest { + string channel_name = 1; //channel 名称 + ChannelContentType content_type = 2; //数据模式,用来识别image和video + } + + message PresentImageRequest { + ImageFormat format = 1; // 图片格式,当前仅支持Jpeg + uint32 width = 2; //图片宽度 + uint32 height =3; //图片高度 + bytes data = 4; //图片数据 + } + ``` + + 通过epoll实现多路channel并行工作,实现伪码如下: + + ``` + def _server_listen_thread(self): + """socket server thread, epoll listening all the socket events""" + epoll = select.epoll() + epoll.register(self._sock_server.fileno(), select.EPOLLIN | select.EPOLLHUP) + try: + conns = {} + msgs = {} + while True: + events = epoll.poll(EPOLL_TIMEOUT) + # timeout, but no event come, continue waiting + if not events: + continue + for sock_fileno, event in events: + # new connection request from presenter agent + if self._sock_server.fileno() == sock_fileno: + self._accept_new_socket(epoll, conns) + # remote connection closed + # it means presenter agent exit withot close socket. 
+ elif event & select.EPOLLHUP: + self._clean_connect(sock_fileno, epoll, conns, msgs) + # new data coming in a socket connection + elif event & select.EPOLLIN: + self._process_epollin(sock_fileno, epoll, conns, msgs) + # receive event not recognize + else: + self._clean_connect(sock_fileno, epoll, conns, msgs) + finally: + epoll.unregister(self._sock_server.fileno()) + epoll.close() + self._sock_server.close() + ``` + + 消息解析过程,实现伪码如下,首先是解析消息,包括消息长度,消息名,最后读取protobuf并进行处理。 + + ``` + def _read_sock_and_process_msg(self, sock_fileno, conns, msgs): + # Step1: read msg head + msg_total_len, msg_name_len = self._read_msg_head(sock_fileno, conns) + if msg_total_len is None: + return PRESENTER_ERR + # Step2: read msg name + msg_name = self._read_msg_name(conns[sock_fileno], msg_name_len) + if msg_name == SOCKET_RECEIVE_NULL: + return PRESENTER_ERR + try: + msg_name = msg_name.decode("utf-8") + except UnicodeDecodeError: + return PRESENTER_ERR + # Step3: read msg body + msg_body_len = msg_total_len - MSG_HEAD_LENGTH - msg_name_len + ret = self._read_msg_body(sock_fileno, conns, msgs, msg_name, msg_body_len) + if ret == PRESENTER_ERR: + return ret + # Step4: process msg + ret = self._process_msg(conns[sock_fileno], msg_name, msgs[sock_fileno]) + return ret + ``` + + 解析protobuf,来自Presenter Agent的消息请求共有三个,分别是打开channel、发送数据、发送心跳。 + + ``` + def _process_msg(self, conn, msg_name, msg_data): + # process open channel request + if msg_name == OPEN_CHANNEL_REQUEST_FULL_NAME: + ret = self._process_open_channel(conn, msg_data) + # process image request, receive an image data from presenter agent + elif msg_name == PRESENT_IMAGE_REQUEST_FULL_NAME: + ret = self._process_image_request(conn, msg_data) + # process heartbeat request, it used to keepalive a channel path + elif msg_name == HEART_BEAT_MESSAGE_FULL_NAME: + ret = self._process_heartbeat(conn) + else: + ret = PRESENTER_ERR + return ret + ``` + + +## Present Agent + +Presenter Agent提供一系列API,用户可以调用这些API向Presenter 
Server推送媒体消息,并在浏览器中查看。当前支持JPEG格式图片的推送。 + +调用流程如下所示: + +![输入图片说明](https://images.gitee.com/uploads/images/2021/0112/114246_8c1504f0_7401379.png "屏幕截图.png") + +1. App调用OpenChannel函数打开与Presenter Server间的通道。 +2. App调用SendMessage函数在该通道上推送媒体消息。推送消息时, 支持在推送的图片上画矩形框。使用时需要将框的左上、右下点的坐标、框的标题设置到PresentImageRequest对象中。 +3. 所有图片发送完成后,App调用CloseChannel函数释放分配的资源。 + +- **Sample Code** + + 以发送图片为例: + + 1. Open channel + + ``` + OpenChannelParam param; + param.hostIp = "127.0.0.1"; //IP address of Presenter Server + param.port = 7006; //port of present service + param.channelName = "image"; + param.contentType = ContentType::kImage; //content type is IMAGE + + Channel *channel = nullptr; + PresenterErrorCode errorCode = OpenChannel(channel, param); + if (errorCode != PresenterErrorCode::kNone) { + return; + } + ``` + + 2. SendMessage + + ``` + ascend::presenter::proto::PresentImageRequest request; + request.set_data(string(reinterpret_cast(buffer), size)); //image data buffer, image shuold be jpeg format + request.set_width(1920); + request.set_height(1280); + + //Set the rectangles info into request. + ascend::presenter::proto::Rectangle_Attr *rectangle_attr = nullptr; + rectangle_attr = request.add_rectangle_list(); //Add one rectangle + rectangle_attr->mutable_left_top()-> set_x(100); + rectangle_attr->mutable_left_top()-> set_y(100); + rectangle_attr->mutable_right_bottom()->set_x(500); + rectangle_attr->mutable_right_bottom()->set_y(500); + rectangle_attr->set_label_text("This is a title"); //Set the title for the rectangle + + ascend::presenter::PresenterErrorCode error_code = ascend::presenter::SendMessage(channel, request) + + ``` + + 3. 
Close Channel + + ``` + delete channel; + ``` + + + 如果需要发送一系列图片来展示视频的效果,则将[1](#zh-cn_topic_0147635264_li182791110135216)中的contentType改为ContentType::kVideo,并 + + 不断的调用SendMessage即可。 + +- **修改源码** + + 编译源码需要使用protoc编译proto文件,请从[https://github.com/protocolbuffers/protobuf/releases/](http://code.google.com/p/protobuf/downloads/list)中获取软件包protoc-3.5.1-linux- + + x86\_64.zip,并参照包中的readme进行安装。 + + 主要源码结构如下所示: + + ``` + common/presenter/agent PresenterAgent源码根目录 + ├─proto Protobuf消息定义 + ├─include/ascendk/presenter/agent API头文件 + ├─channel.h 通用channel的接口,提供收发protobuf消息的功能 + ├─errors.h 错误码 + ├─presenter_channel.h 封装了发送媒体数据到PresenterServer的功能 + ├─presenter_types.h 封装了发送媒体数据到PresenterServer的功能 + ├─src/asceddk/presenter/agent 源码目录 + ├─channel 与PresenterServer交互相关源码 + ├─default_channel.cpp Channel类的默认实现,维护与Server间的长连接 + ├─default_channel.h Channel类的默认实现的头文件 + ├─channel.cpp Channel类的默认实现的头文件 + ├─codec + ├─connection 提供发送/接收protobuf消息的接口 + ├─net 网络连接相关源码, 完成收发字节数组的接口 + ├─socket.cpp Socket抽象类 + ├─socket_factory.cpp Socket工厂抽象类 + ├─raw_socket.cpp 基于linux原生socket,不提供通道加密功能 + ├─raw_socket_factory.cpp RawSocket的工厂类 + ├─presenter 封装了发送媒体数据到PresenterServer的功能 + ├─util 工具类相关源码 + ├─Makefile + ``` + + 如果修改了presenter\_message.proto,则需要在proto文件夹下执行以下命令编译proto文件: + + protoc presenter\_message.proto --cpp\_out=./ + + diff --git a/cplusplus/common/presenteragent/include/ascenddk/presenter/agent/channel.h b/cplusplus/common/presenteragent/include/ascenddk/presenter/agent/channel.h new file mode 100644 index 0000000000000000000000000000000000000000..fec3fcf102de84a20e0ccb3cdcd395ee8bde51a6 --- /dev/null +++ b/cplusplus/common/presenteragent/include/ascenddk/presenter/agent/channel.h @@ -0,0 +1,184 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#ifndef ASCENDDK_PRESENTER_AGENT_CHANNEL_H_ +#define ASCENDDK_PRESENTER_AGENT_CHANNEL_H_ + +#include +#include +#include +#include + +#include "ascenddk/presenter/agent/errors.h" + +namespace google { +namespace protobuf { +class Message; +} +} + +namespace ascend { +namespace presenter { + +/** + * TLV + */ +struct Tlv { + int tag; + int length; + const char* value; +}; + +/** + * When a message has large fields, this can be used to avoid + * unnecessary memory copies + */ +struct PartialMessageWithTlvs { + const google::protobuf::Message* message; + std::vector tlv_list; +}; + +/** + * Deal with channel initialization + */ +class InitChannelHandler { + public: + InitChannelHandler() = default; + virtual ~InitChannelHandler() = default; + + /** + * @brief Create initialize request + * @return initialize request + */ + virtual google::protobuf::Message* CreateInitRequest() = 0; + + /** + * @brief check the response + * @param [in] response response + * @return check result + */ + virtual bool CheckInitResponse(const google::protobuf::Message& response) = 0; +}; + +/** + * @brief General channel + */ +class Channel { + public: + virtual ~Channel() = default; + + /** + * @brief Open channel + * @return PresenterErrorCode + */ + virtual PresenterErrorCode Open() = 0; + + /** + * @brief send message to server + * @param [in] message message + * @return PresenterErrorCode + */ + virtual PresenterErrorCode SendMessage( + const google::protobuf::Message& message) = 0; + + /** + * @brief send message to server + * @param [in] message message + * @return PresenterErrorCode + */ + virtual PresenterErrorCode SendMessage( + const PartialMessageWithTlvs& message) = 0; + + /** + * @brief send message to server and read the response + * @param [in] message message + * @pararm [out] response response + * @return PresenterErrorCode + */ + virtual PresenterErrorCode SendMessage( + const 
google::protobuf::Message& message, + std::unique_ptr& response) = 0; + + /** + * @brief send message to server and read the response + * @param [in] message message + * @pararm [out] response response + * @return PresenterErrorCode + */ + virtual PresenterErrorCode SendMessage( + const PartialMessageWithTlvs& message, + std::unique_ptr& response) = 0; + + /** + * @brief recevice a response + * @param [out] response response + * @return PresenterErrorCode + */ + virtual PresenterErrorCode ReceiveMessage( + std::unique_ptr& response) = 0; + + /** + * @brief Get the description of the channel, can be used for logging + * @return description + */ + virtual const std::string& GetDescription() const = 0; +}; + +/** + * Channel Factory + */ +class ChannelFactory { + public: + /** + * @brief create a channel + * @param [in] host_ip host IP of server + * @param [in] port port of server + * @return pointer to channel + */ + static Channel* NewChannel(const std::string& host_ip, uint16_t port); + + /** + * @brief create a channel + * @param [in] host_ip host IP of server + * @param [in] port port of server + * @param [in] handler init handler + * @return pointer to channel + */ + static Channel* NewChannel(const std::string& host_ip, uint16_t port, + std::shared_ptr handler); +}; + +} /* namespace presenter */ +} /* namespace ascend */ + +#endif /* ASCENDDK_PRESENTER_AGENT_CHANNEL_H_ */ diff --git a/cplusplus/common/presenteragent/include/ascenddk/presenter/agent/errors.h b/cplusplus/common/presenteragent/include/ascenddk/presenter/agent/errors.h new file mode 100644 index 0000000000000000000000000000000000000000..07589f7210380d96570606af6d6d82ef1fcfedd3 --- /dev/null +++ b/cplusplus/common/presenteragent/include/ascenddk/presenter/agent/errors.h @@ -0,0 +1,84 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#ifndef ASCENDDK_PRESENTER_AGENT_ERRORS_H_ +#define ASCENDDK_PRESENTER_AGENT_ERRORS_H_ + +namespace ascend { +namespace presenter { + +/** + * PresenterErrorCode + */ +enum class PresenterErrorCode { + // Success, no error + kNone = 0, + + // parameter check error + kInvalidParam, + + // Connect to presenter server error + kConnection, + + // SSL certification error + kSsl, + + // Encode/Decode message error + kCodec, + + // The given channel name is not created in server + kNoSuchChannel, + + // The given channel is opened by another process + kChannelAlreadyOpened, + + // Presenter server return unknown error + kServerReturnedUnknownError, + + // Alloc object error + kBadAlloc, + + // App returned error + kAppDefinedError, + + // Timeout + kSocketTimeout, + + // Uncategorized error + kOther, +}; + +} /* namespace presenter */ +} /* namespace ascend */ + +#endif /* ASCENDDK_PRESENTER_AGENT_ERRORS_H_ */ diff --git a/cplusplus/common/presenteragent/include/ascenddk/presenter/agent/presenter_channel.h b/cplusplus/common/presenteragent/include/ascenddk/presenter/agent/presenter_channel.h new file mode 100644 index 0000000000000000000000000000000000000000..0a6399a0d393a6d2e587f654fe8ab7f0fa7aa3e8 --- /dev/null +++ b/cplusplus/common/presenteragent/include/ascenddk/presenter/agent/presenter_channel.h @@ -0,0 +1,85 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. 
+ * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#ifndef ASCENDDK_PRESENTER_AGENT_PRESENTER_CHANNEL_H_ +#define ASCENDDK_PRESENTER_AGENT_PRESENTER_CHANNEL_H_ + +#include "ascenddk/presenter/agent/channel.h" +#include "ascenddk/presenter/agent/errors.h" +#include "ascenddk/presenter/agent/presenter_types.h" + +namespace ascend { +namespace presenter { + +/** + * @brief Open a channel to presenter server + * @param [out] channel channel must be a NULL pointer, + * and it will point to an opened channel if + * open successfully + * @param [in] param parameters for opening a channel + * @return PresenterErrorCode + */ +PresenterErrorCode OpenChannel(Channel *&channel, + const OpenChannelParam ¶m); +/** + * @brief Open a channel to presenter server by config file + * @param [out] channel channel must be a NULL pointer, + * and it will point to an opened channel if + * open successfully + * @configFile [in] param config file of channel configuration + * @return PresenterErrorCode + */ +PresenterErrorCode OpenChannelByConfig(Channel*& channel, + const char* configFile); + +/** + * @brief Send the image to server for display through the given channel + * @param [in] channel the channel to send the image with + * @param [in] image the image to display + * @return PresenterErrorCode + */ +PresenterErrorCode PresentImage(Channel *channel, const ImageFrame &image); + +/** + * @brief Send the image message to server for display through the given channel + * @param [in] channel the channel to send the image with + * @param [in] message a protobuf message + * @return PresenterErrorCode + */ +PresenterErrorCode SendMessage(Channel *channel, + const google::protobuf::Message& message); + +} /* namespace presenter */ +} /* namespace ascend */ + +#endif /* ASCENDDK_PRESENTER_AGENT_PRESENTER_CHANNEL_H_ */ diff --git a/cplusplus/common/presenteragent/include/ascenddk/presenter/agent/presenter_types.h 
b/cplusplus/common/presenteragent/include/ascenddk/presenter/agent/presenter_types.h new file mode 100644 index 0000000000000000000000000000000000000000..3e38af763d4e9a842a30969ca8fc01cf090fece5 --- /dev/null +++ b/cplusplus/common/presenteragent/include/ascenddk/presenter/agent/presenter_types.h @@ -0,0 +1,103 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * ============================================================================ + */ +#ifndef ASCENDDK_PRESENTER_AGENT_DATA_TYPES_H_ +#define ASCENDDK_PRESENTER_AGENT_DATA_TYPES_H_ + +#include +#include +#include + +namespace ascend { +namespace presenter { + +/** + * ContentType + */ +enum class ContentType { + // Image + kImage = 0, + + // Video + kVideo = 1, + + // Reserved content type, do not use this + kReserved = 127, +}; + +/** + * ImageFormat + */ +enum class ImageFormat { + // JPEG + kJpeg = 0, + + // Reserved format, do not use this + kReserved = 127, +}; + +/** + * OpenChannelParam + */ +struct OpenChannelParam { + std::string host_ip; + std::uint16_t port; + std::string channel_name; + ContentType content_type; +}; + +struct Point { + std::uint32_t x; + std::uint32_t y; +}; + +struct DetectionResult { + Point lt; //The coordinate of left top point + Point rb; //The coordinate of the right bottom point + std::string result_text; // Face:xx% +}; +/** + * ImageFrame + */ +struct ImageFrame { + ImageFormat format; + std::uint32_t width; + std::uint32_t height; + std::uint32_t size; + unsigned char *data; + std::vector detection_results; +}; + +} /* namespace presenter */ +} /* namespace ascend */ + +#endif /* ASCENDDK_PRESENTER_AGENT_DATA_TYPES_H_ */ diff --git a/cplusplus/common/presenteragent/proto/presenter_message.pb.cc b/cplusplus/common/presenteragent/proto/presenter_message.pb.cc new file mode 100644 index 
0000000000000000000000000000000000000000..2f02c1074463830afda9801b418eaec65928e152 --- /dev/null +++ b/cplusplus/common/presenteragent/proto/presenter_message.pb.cc @@ -0,0 +1,2723 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: presenter_message.proto + +#include "presenter_message.pb.h" + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +// @@protoc_insertion_point(includes) +#include +extern PROTOBUF_INTERNAL_EXPORT_presenter_5fmessage_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_Coordinate_presenter_5fmessage_2eproto; +extern PROTOBUF_INTERNAL_EXPORT_presenter_5fmessage_2eproto ::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<1> scc_info_Rectangle_Attr_presenter_5fmessage_2eproto; +namespace ascend { +namespace presenter { +namespace proto { +class OpenChannelRequestDefaultTypeInternal { + public: + ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; +} _OpenChannelRequest_default_instance_; +class OpenChannelResponseDefaultTypeInternal { + public: + ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; +} _OpenChannelResponse_default_instance_; +class HeartbeatMessageDefaultTypeInternal { + public: + ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; +} _HeartbeatMessage_default_instance_; +class CoordinateDefaultTypeInternal { + public: + ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; +} _Coordinate_default_instance_; +class Rectangle_AttrDefaultTypeInternal { + public: + ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; +} _Rectangle_Attr_default_instance_; +class PresentImageRequestDefaultTypeInternal { + public: + ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; +} _PresentImageRequest_default_instance_; +class PresentImageResponseDefaultTypeInternal { + public: + ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed _instance; +} 
_PresentImageResponse_default_instance_; +} // namespace proto +} // namespace presenter +} // namespace ascend +static void InitDefaultsscc_info_Coordinate_presenter_5fmessage_2eproto() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + { + void* ptr = &::ascend::presenter::proto::_Coordinate_default_instance_; + new (ptr) ::ascend::presenter::proto::Coordinate(); + ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); + } + ::ascend::presenter::proto::Coordinate::InitAsDefaultInstance(); +} + +::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_Coordinate_presenter_5fmessage_2eproto = + {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_Coordinate_presenter_5fmessage_2eproto}, {}}; + +static void InitDefaultsscc_info_HeartbeatMessage_presenter_5fmessage_2eproto() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + { + void* ptr = &::ascend::presenter::proto::_HeartbeatMessage_default_instance_; + new (ptr) ::ascend::presenter::proto::HeartbeatMessage(); + ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); + } + ::ascend::presenter::proto::HeartbeatMessage::InitAsDefaultInstance(); +} + +::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_HeartbeatMessage_presenter_5fmessage_2eproto = + {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_HeartbeatMessage_presenter_5fmessage_2eproto}, {}}; + +static void InitDefaultsscc_info_OpenChannelRequest_presenter_5fmessage_2eproto() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + { + void* ptr = &::ascend::presenter::proto::_OpenChannelRequest_default_instance_; + new (ptr) ::ascend::presenter::proto::OpenChannelRequest(); + ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); + } + ::ascend::presenter::proto::OpenChannelRequest::InitAsDefaultInstance(); +} + +::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_OpenChannelRequest_presenter_5fmessage_2eproto = + 
{{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_OpenChannelRequest_presenter_5fmessage_2eproto}, {}}; + +static void InitDefaultsscc_info_OpenChannelResponse_presenter_5fmessage_2eproto() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + { + void* ptr = &::ascend::presenter::proto::_OpenChannelResponse_default_instance_; + new (ptr) ::ascend::presenter::proto::OpenChannelResponse(); + ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); + } + ::ascend::presenter::proto::OpenChannelResponse::InitAsDefaultInstance(); +} + +::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_OpenChannelResponse_presenter_5fmessage_2eproto = + {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_OpenChannelResponse_presenter_5fmessage_2eproto}, {}}; + +static void InitDefaultsscc_info_PresentImageRequest_presenter_5fmessage_2eproto() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + { + void* ptr = &::ascend::presenter::proto::_PresentImageRequest_default_instance_; + new (ptr) ::ascend::presenter::proto::PresentImageRequest(); + ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); + } + ::ascend::presenter::proto::PresentImageRequest::InitAsDefaultInstance(); +} + +::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<1> scc_info_PresentImageRequest_presenter_5fmessage_2eproto = + {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsscc_info_PresentImageRequest_presenter_5fmessage_2eproto}, { + &scc_info_Rectangle_Attr_presenter_5fmessage_2eproto.base,}}; + +static void InitDefaultsscc_info_PresentImageResponse_presenter_5fmessage_2eproto() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + { + void* ptr = &::ascend::presenter::proto::_PresentImageResponse_default_instance_; + new (ptr) ::ascend::presenter::proto::PresentImageResponse(); + ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); + } + 
::ascend::presenter::proto::PresentImageResponse::InitAsDefaultInstance(); +} + +::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<0> scc_info_PresentImageResponse_presenter_5fmessage_2eproto = + {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsscc_info_PresentImageResponse_presenter_5fmessage_2eproto}, {}}; + +static void InitDefaultsscc_info_Rectangle_Attr_presenter_5fmessage_2eproto() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + { + void* ptr = &::ascend::presenter::proto::_Rectangle_Attr_default_instance_; + new (ptr) ::ascend::presenter::proto::Rectangle_Attr(); + ::PROTOBUF_NAMESPACE_ID::internal::OnShutdownDestroyMessage(ptr); + } + ::ascend::presenter::proto::Rectangle_Attr::InitAsDefaultInstance(); +} + +::PROTOBUF_NAMESPACE_ID::internal::SCCInfo<1> scc_info_Rectangle_Attr_presenter_5fmessage_2eproto = + {{ATOMIC_VAR_INIT(::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsscc_info_Rectangle_Attr_presenter_5fmessage_2eproto}, { + &scc_info_Coordinate_presenter_5fmessage_2eproto.base,}}; + +static ::PROTOBUF_NAMESPACE_ID::Metadata file_level_metadata_presenter_5fmessage_2eproto[7]; +static const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor* file_level_enum_descriptors_presenter_5fmessage_2eproto[4]; +static constexpr ::PROTOBUF_NAMESPACE_ID::ServiceDescriptor const** file_level_service_descriptors_presenter_5fmessage_2eproto = nullptr; + +const ::PROTOBUF_NAMESPACE_ID::uint32 TableStruct_presenter_5fmessage_2eproto::offsets[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { + ~0u, // no _has_bits_ + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::OpenChannelRequest, _internal_metadata_), + ~0u, // no _extensions_ + ~0u, // no _oneof_case_ + ~0u, // no _weak_field_map_ + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::OpenChannelRequest, channel_name_), + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::OpenChannelRequest, content_type_), + ~0u, // no _has_bits_ + 
PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::OpenChannelResponse, _internal_metadata_), + ~0u, // no _extensions_ + ~0u, // no _oneof_case_ + ~0u, // no _weak_field_map_ + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::OpenChannelResponse, error_code_), + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::OpenChannelResponse, error_message_), + ~0u, // no _has_bits_ + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::HeartbeatMessage, _internal_metadata_), + ~0u, // no _extensions_ + ~0u, // no _oneof_case_ + ~0u, // no _weak_field_map_ + ~0u, // no _has_bits_ + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::Coordinate, _internal_metadata_), + ~0u, // no _extensions_ + ~0u, // no _oneof_case_ + ~0u, // no _weak_field_map_ + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::Coordinate, x_), + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::Coordinate, y_), + ~0u, // no _has_bits_ + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::Rectangle_Attr, _internal_metadata_), + ~0u, // no _extensions_ + ~0u, // no _oneof_case_ + ~0u, // no _weak_field_map_ + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::Rectangle_Attr, left_top_), + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::Rectangle_Attr, right_bottom_), + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::Rectangle_Attr, label_text_), + ~0u, // no _has_bits_ + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::PresentImageRequest, _internal_metadata_), + ~0u, // no _extensions_ + ~0u, // no _oneof_case_ + ~0u, // no _weak_field_map_ + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::PresentImageRequest, format_), + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::PresentImageRequest, width_), + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::PresentImageRequest, height_), + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::PresentImageRequest, data_), + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::PresentImageRequest, rectangle_list_), + ~0u, // no _has_bits_ + 
PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::PresentImageResponse, _internal_metadata_), + ~0u, // no _extensions_ + ~0u, // no _oneof_case_ + ~0u, // no _weak_field_map_ + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::PresentImageResponse, error_code_), + PROTOBUF_FIELD_OFFSET(::ascend::presenter::proto::PresentImageResponse, error_message_), +}; +static const ::PROTOBUF_NAMESPACE_ID::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { + { 0, -1, sizeof(::ascend::presenter::proto::OpenChannelRequest)}, + { 7, -1, sizeof(::ascend::presenter::proto::OpenChannelResponse)}, + { 14, -1, sizeof(::ascend::presenter::proto::HeartbeatMessage)}, + { 19, -1, sizeof(::ascend::presenter::proto::Coordinate)}, + { 26, -1, sizeof(::ascend::presenter::proto::Rectangle_Attr)}, + { 34, -1, sizeof(::ascend::presenter::proto::PresentImageRequest)}, + { 44, -1, sizeof(::ascend::presenter::proto::PresentImageResponse)}, +}; + +static ::PROTOBUF_NAMESPACE_ID::Message const * const file_default_instances[] = { + reinterpret_cast(&::ascend::presenter::proto::_OpenChannelRequest_default_instance_), + reinterpret_cast(&::ascend::presenter::proto::_OpenChannelResponse_default_instance_), + reinterpret_cast(&::ascend::presenter::proto::_HeartbeatMessage_default_instance_), + reinterpret_cast(&::ascend::presenter::proto::_Coordinate_default_instance_), + reinterpret_cast(&::ascend::presenter::proto::_Rectangle_Attr_default_instance_), + reinterpret_cast(&::ascend::presenter::proto::_PresentImageRequest_default_instance_), + reinterpret_cast(&::ascend::presenter::proto::_PresentImageResponse_default_instance_), +}; + +const char descriptor_table_protodef_presenter_5fmessage_2eproto[] = + "\n\027presenter_message.proto\022\026ascend.presen" + "ter.proto\"l\n\022OpenChannelRequest\022\024\n\014chann" + "el_name\030\001 \001(\t\022@\n\014content_type\030\002 \001(\0162*.as" + "cend.presenter.proto.ChannelContentType\"" + 
"n\n\023OpenChannelResponse\022@\n\nerror_code\030\001 \001" + "(\0162,.ascend.presenter.proto.OpenChannelE" + "rrorCode\022\025\n\rerror_message\030\002 \001(\t\"\022\n\020Heart" + "beatMessage\"\"\n\nCoordinate\022\t\n\001x\030\001 \001(\r\022\t\n\001" + "y\030\002 \001(\r\"\224\001\n\016Rectangle_Attr\0224\n\010left_top\030\001" + " \001(\0132\".ascend.presenter.proto.Coordinate" + "\0228\n\014right_bottom\030\002 \001(\0132\".ascend.presente" + "r.proto.Coordinate\022\022\n\nlabel_text\030\003 \001(\t\"\267" + "\001\n\023PresentImageRequest\0223\n\006format\030\001 \001(\0162#" + ".ascend.presenter.proto.ImageFormat\022\r\n\005w" + "idth\030\002 \001(\r\022\016\n\006height\030\003 \001(\r\022\014\n\004data\030\004 \001(\014" + "\022>\n\016rectangle_list\030\005 \003(\0132&.ascend.presen" + "ter.proto.Rectangle_Attr\"o\n\024PresentImage" + "Response\022@\n\nerror_code\030\001 \001(\0162,.ascend.pr" + "esenter.proto.PresentDataErrorCode\022\025\n\rer" + "ror_message\030\002 \001(\t*\245\001\n\024OpenChannelErrorCo" + "de\022\031\n\025kOpenChannelErrorNone\020\000\022\"\n\036kOpenCh" + "annelErrorNoSuchChannel\020\001\022)\n%kOpenChanne" + "lErrorChannelAlreadyOpened\020\002\022#\n\026kOpenCha" + "nnelErrorOther\020\377\377\377\377\377\377\377\377\377\001*P\n\022ChannelCont" + "entType\022\034\n\030kChannelContentTypeImage\020\000\022\034\n" + "\030kChannelContentTypeVideo\020\001*#\n\013ImageForm" + "at\022\024\n\020kImageFormatJpeg\020\000*\244\001\n\024PresentData" + "ErrorCode\022\031\n\025kPresentDataErrorNone\020\000\022$\n " + "kPresentDataErrorUnsupportedType\020\001\022&\n\"kP" + "resentDataErrorUnsupportedFormat\020\002\022#\n\026kP" + "resentDataErrorOther\020\377\377\377\377\377\377\377\377\377\001b\006proto3" + ; +static const ::PROTOBUF_NAMESPACE_ID::internal::DescriptorTable*const descriptor_table_presenter_5fmessage_2eproto_deps[1] = { +}; +static ::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase*const 
descriptor_table_presenter_5fmessage_2eproto_sccs[7] = { + &scc_info_Coordinate_presenter_5fmessage_2eproto.base, + &scc_info_HeartbeatMessage_presenter_5fmessage_2eproto.base, + &scc_info_OpenChannelRequest_presenter_5fmessage_2eproto.base, + &scc_info_OpenChannelResponse_presenter_5fmessage_2eproto.base, + &scc_info_PresentImageRequest_presenter_5fmessage_2eproto.base, + &scc_info_PresentImageResponse_presenter_5fmessage_2eproto.base, + &scc_info_Rectangle_Attr_presenter_5fmessage_2eproto.base, +}; +static ::PROTOBUF_NAMESPACE_ID::internal::once_flag descriptor_table_presenter_5fmessage_2eproto_once; +static bool descriptor_table_presenter_5fmessage_2eproto_initialized = false; +const ::PROTOBUF_NAMESPACE_ID::internal::DescriptorTable descriptor_table_presenter_5fmessage_2eproto = { + &descriptor_table_presenter_5fmessage_2eproto_initialized, descriptor_table_protodef_presenter_5fmessage_2eproto, "presenter_message.proto", 1239, + &descriptor_table_presenter_5fmessage_2eproto_once, descriptor_table_presenter_5fmessage_2eproto_sccs, descriptor_table_presenter_5fmessage_2eproto_deps, 7, 0, + schemas, file_default_instances, TableStruct_presenter_5fmessage_2eproto::offsets, + file_level_metadata_presenter_5fmessage_2eproto, 7, file_level_enum_descriptors_presenter_5fmessage_2eproto, file_level_service_descriptors_presenter_5fmessage_2eproto, +}; + +// Force running AddDescriptors() at dynamic initialization time. 
+static bool dynamic_init_dummy_presenter_5fmessage_2eproto = ( ::PROTOBUF_NAMESPACE_ID::internal::AddDescriptors(&descriptor_table_presenter_5fmessage_2eproto), true); +namespace ascend { +namespace presenter { +namespace proto { +const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor* OpenChannelErrorCode_descriptor() { + ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&descriptor_table_presenter_5fmessage_2eproto); + return file_level_enum_descriptors_presenter_5fmessage_2eproto[0]; +} +bool OpenChannelErrorCode_IsValid(int value) { + switch (value) { + case -1: + case 0: + case 1: + case 2: + return true; + default: + return false; + } +} + +const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor* ChannelContentType_descriptor() { + ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&descriptor_table_presenter_5fmessage_2eproto); + return file_level_enum_descriptors_presenter_5fmessage_2eproto[1]; +} +bool ChannelContentType_IsValid(int value) { + switch (value) { + case 0: + case 1: + return true; + default: + return false; + } +} + +const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor* ImageFormat_descriptor() { + ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&descriptor_table_presenter_5fmessage_2eproto); + return file_level_enum_descriptors_presenter_5fmessage_2eproto[2]; +} +bool ImageFormat_IsValid(int value) { + switch (value) { + case 0: + return true; + default: + return false; + } +} + +const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor* PresentDataErrorCode_descriptor() { + ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&descriptor_table_presenter_5fmessage_2eproto); + return file_level_enum_descriptors_presenter_5fmessage_2eproto[3]; +} +bool PresentDataErrorCode_IsValid(int value) { + switch (value) { + case -1: + case 0: + case 1: + case 2: + return true; + default: + return false; + } +} + + +// =================================================================== + +void OpenChannelRequest::InitAsDefaultInstance() { +} +class 
OpenChannelRequest::HasBitSetters { + public: +}; + +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int OpenChannelRequest::kChannelNameFieldNumber; +const int OpenChannelRequest::kContentTypeFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +OpenChannelRequest::OpenChannelRequest() + : ::PROTOBUF_NAMESPACE_ID::Message(), _internal_metadata_(nullptr) { + SharedCtor(); + // @@protoc_insertion_point(constructor:ascend.presenter.proto.OpenChannelRequest) +} +OpenChannelRequest::OpenChannelRequest(const OpenChannelRequest& from) + : ::PROTOBUF_NAMESPACE_ID::Message(), + _internal_metadata_(nullptr) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + channel_name_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); + if (from.channel_name().size() > 0) { + channel_name_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.channel_name_); + } + content_type_ = from.content_type_; + // @@protoc_insertion_point(copy_constructor:ascend.presenter.proto.OpenChannelRequest) +} + +void OpenChannelRequest::SharedCtor() { + ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_OpenChannelRequest_presenter_5fmessage_2eproto.base); + channel_name_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); + content_type_ = 0; +} + +OpenChannelRequest::~OpenChannelRequest() { + // @@protoc_insertion_point(destructor:ascend.presenter.proto.OpenChannelRequest) + SharedDtor(); +} + +void OpenChannelRequest::SharedDtor() { + channel_name_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} + +void OpenChannelRequest::SetCachedSize(int size) const { + _cached_size_.Set(size); +} +const OpenChannelRequest& OpenChannelRequest::default_instance() { + ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_OpenChannelRequest_presenter_5fmessage_2eproto.base); + return *internal_default_instance(); +} + + +void OpenChannelRequest::Clear() 
{ +// @@protoc_insertion_point(message_clear_start:ascend.presenter.proto.OpenChannelRequest) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + channel_name_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); + content_type_ = 0; + _internal_metadata_.Clear(); +} + +#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +const char* OpenChannelRequest::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { +#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure + while (!ctx->Done(&ptr)) { + ::PROTOBUF_NAMESPACE_ID::uint32 tag; + ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); + CHK_(ptr); + switch (tag >> 3) { + // string channel_name = 1; + case 1: + if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 10)) { + ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParserUTF8(mutable_channel_name(), ptr, ctx, "ascend.presenter.proto.OpenChannelRequest.channel_name"); + CHK_(ptr); + } else goto handle_unusual; + continue; + // .ascend.presenter.proto.ChannelContentType content_type = 2; + case 2: + if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { + ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); + CHK_(ptr); + set_content_type(static_cast<::ascend::presenter::proto::ChannelContentType>(val)); + } else goto handle_unusual; + continue; + default: { + handle_unusual: + if ((tag & 7) == 4 || tag == 0) { + ctx->SetLastTag(tag); + goto success; + } + ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); + CHK_(ptr != nullptr); + continue; + } + } // switch + } // while +success: + return ptr; +failure: + ptr = nullptr; + goto success; +#undef CHK_ +} +#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +bool OpenChannelRequest::MergePartialFromCodedStream( + 
::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure + ::PROTOBUF_NAMESPACE_ID::uint32 tag; + // @@protoc_insertion_point(parse_start:ascend.presenter.proto.OpenChannelRequest) + for (;;) { + ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // string channel_name = 1; + case 1: { + if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (10 & 0xFF)) { + DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadString( + input, this->mutable_channel_name())); + DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String( + this->channel_name().data(), static_cast(this->channel_name().length()), + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::PARSE, + "ascend.presenter.proto.OpenChannelRequest.channel_name")); + } else { + goto handle_unusual; + } + break; + } + + // .ascend.presenter.proto.ChannelContentType content_type = 2; + case 2: { + if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { + int value = 0; + DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< + int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( + input, &value))); + set_content_type(static_cast< ::ascend::presenter::proto::ChannelContentType >(value)); + } else { + goto handle_unusual; + } + break; + } + + default: { + handle_unusual: + if (tag == 0) { + goto success; + } + DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:ascend.presenter.proto.OpenChannelRequest) + return true; +failure: + // @@protoc_insertion_point(parse_failure:ascend.presenter.proto.OpenChannelRequest) + return false; +#undef DO_ +} 
+#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + +void OpenChannelRequest::SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:ascend.presenter.proto.OpenChannelRequest) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // string channel_name = 1; + if (this->channel_name().size() > 0) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String( + this->channel_name().data(), static_cast(this->channel_name().length()), + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SERIALIZE, + "ascend.presenter.proto.OpenChannelRequest.channel_name"); + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteStringMaybeAliased( + 1, this->channel_name(), output); + } + + // .ascend.presenter.proto.ChannelContentType content_type = 2; + if (this->content_type() != 0) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( + 2, this->content_type(), output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:ascend.presenter.proto.OpenChannelRequest) +} + +::PROTOBUF_NAMESPACE_ID::uint8* OpenChannelRequest::InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const { + // @@protoc_insertion_point(serialize_to_array_start:ascend.presenter.proto.OpenChannelRequest) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // string channel_name = 1; + if (this->channel_name().size() > 0) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String( + this->channel_name().data(), static_cast(this->channel_name().length()), + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SERIALIZE, + "ascend.presenter.proto.OpenChannelRequest.channel_name"); + target = + 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteStringToArray( + 1, this->channel_name(), target); + } + + // .ascend.presenter.proto.ChannelContentType content_type = 2; + if (this->content_type() != 0) { + target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnumToArray( + 2, this->content_type(), target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SerializeUnknownFieldsToArray( + _internal_metadata_.unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:ascend.presenter.proto.OpenChannelRequest) + return target; +} + +size_t OpenChannelRequest::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:ascend.presenter.proto.OpenChannelRequest) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + // string channel_name = 1; + if (this->channel_name().size() > 0) { + total_size += 1 + + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::StringSize( + this->channel_name()); + } + + // .ascend.presenter.proto.ChannelContentType content_type = 2; + if (this->content_type() != 0) { + total_size += 1 + + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->content_type()); + } + + int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); + SetCachedSize(cached_size); + return total_size; +} + +void OpenChannelRequest::MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:ascend.presenter.proto.OpenChannelRequest) + GOOGLE_DCHECK_NE(&from, this); + const OpenChannelRequest* source = + ::PROTOBUF_NAMESPACE_ID::DynamicCastToGenerated( 
+ &from); + if (source == nullptr) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:ascend.presenter.proto.OpenChannelRequest) + ::PROTOBUF_NAMESPACE_ID::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:ascend.presenter.proto.OpenChannelRequest) + MergeFrom(*source); + } +} + +void OpenChannelRequest::MergeFrom(const OpenChannelRequest& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:ascend.presenter.proto.OpenChannelRequest) + GOOGLE_DCHECK_NE(&from, this); + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + if (from.channel_name().size() > 0) { + + channel_name_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.channel_name_); + } + if (from.content_type() != 0) { + set_content_type(from.content_type()); + } +} + +void OpenChannelRequest::CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:ascend.presenter.proto.OpenChannelRequest) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void OpenChannelRequest::CopyFrom(const OpenChannelRequest& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:ascend.presenter.proto.OpenChannelRequest) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool OpenChannelRequest::IsInitialized() const { + return true; +} + +void OpenChannelRequest::Swap(OpenChannelRequest* other) { + if (other == this) return; + InternalSwap(other); +} +void OpenChannelRequest::InternalSwap(OpenChannelRequest* other) { + using std::swap; + _internal_metadata_.Swap(&other->_internal_metadata_); + channel_name_.Swap(&other->channel_name_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); + swap(content_type_, other->content_type_); +} + +::PROTOBUF_NAMESPACE_ID::Metadata 
OpenChannelRequest::GetMetadata() const { + return GetMetadataStatic(); +} + + +// =================================================================== + +void OpenChannelResponse::InitAsDefaultInstance() { +} +class OpenChannelResponse::HasBitSetters { + public: +}; + +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int OpenChannelResponse::kErrorCodeFieldNumber; +const int OpenChannelResponse::kErrorMessageFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +OpenChannelResponse::OpenChannelResponse() + : ::PROTOBUF_NAMESPACE_ID::Message(), _internal_metadata_(nullptr) { + SharedCtor(); + // @@protoc_insertion_point(constructor:ascend.presenter.proto.OpenChannelResponse) +} +OpenChannelResponse::OpenChannelResponse(const OpenChannelResponse& from) + : ::PROTOBUF_NAMESPACE_ID::Message(), + _internal_metadata_(nullptr) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + error_message_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); + if (from.error_message().size() > 0) { + error_message_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.error_message_); + } + error_code_ = from.error_code_; + // @@protoc_insertion_point(copy_constructor:ascend.presenter.proto.OpenChannelResponse) +} + +void OpenChannelResponse::SharedCtor() { + ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_OpenChannelResponse_presenter_5fmessage_2eproto.base); + error_message_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); + error_code_ = 0; +} + +OpenChannelResponse::~OpenChannelResponse() { + // @@protoc_insertion_point(destructor:ascend.presenter.proto.OpenChannelResponse) + SharedDtor(); +} + +void OpenChannelResponse::SharedDtor() { + error_message_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} + +void OpenChannelResponse::SetCachedSize(int size) const { + _cached_size_.Set(size); +} +const 
OpenChannelResponse& OpenChannelResponse::default_instance() { + ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_OpenChannelResponse_presenter_5fmessage_2eproto.base); + return *internal_default_instance(); +} + + +void OpenChannelResponse::Clear() { +// @@protoc_insertion_point(message_clear_start:ascend.presenter.proto.OpenChannelResponse) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + error_message_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); + error_code_ = 0; + _internal_metadata_.Clear(); +} + +#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +const char* OpenChannelResponse::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { +#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure + while (!ctx->Done(&ptr)) { + ::PROTOBUF_NAMESPACE_ID::uint32 tag; + ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); + CHK_(ptr); + switch (tag >> 3) { + // .ascend.presenter.proto.OpenChannelErrorCode error_code = 1; + case 1: + if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { + ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); + CHK_(ptr); + set_error_code(static_cast<::ascend::presenter::proto::OpenChannelErrorCode>(val)); + } else goto handle_unusual; + continue; + // string error_message = 2; + case 2: + if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 18)) { + ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParserUTF8(mutable_error_message(), ptr, ctx, "ascend.presenter.proto.OpenChannelResponse.error_message"); + CHK_(ptr); + } else goto handle_unusual; + continue; + default: { + handle_unusual: + if ((tag & 7) == 4 || tag == 0) { + ctx->SetLastTag(tag); + goto success; + } + ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); + CHK_(ptr != 
nullptr); + continue; + } + } // switch + } // while +success: + return ptr; +failure: + ptr = nullptr; + goto success; +#undef CHK_ +} +#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +bool OpenChannelResponse::MergePartialFromCodedStream( + ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure + ::PROTOBUF_NAMESPACE_ID::uint32 tag; + // @@protoc_insertion_point(parse_start:ascend.presenter.proto.OpenChannelResponse) + for (;;) { + ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // .ascend.presenter.proto.OpenChannelErrorCode error_code = 1; + case 1: { + if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { + int value = 0; + DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< + int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( + input, &value))); + set_error_code(static_cast< ::ascend::presenter::proto::OpenChannelErrorCode >(value)); + } else { + goto handle_unusual; + } + break; + } + + // string error_message = 2; + case 2: { + if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (18 & 0xFF)) { + DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadString( + input, this->mutable_error_message())); + DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String( + this->error_message().data(), static_cast(this->error_message().length()), + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::PARSE, + "ascend.presenter.proto.OpenChannelResponse.error_message")); + } else { + goto handle_unusual; + } + break; + } + + default: { + handle_unusual: + if (tag == 0) { + goto success; + } + DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + break; + } + 
} + } +success: + // @@protoc_insertion_point(parse_success:ascend.presenter.proto.OpenChannelResponse) + return true; +failure: + // @@protoc_insertion_point(parse_failure:ascend.presenter.proto.OpenChannelResponse) + return false; +#undef DO_ +} +#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + +void OpenChannelResponse::SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:ascend.presenter.proto.OpenChannelResponse) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // .ascend.presenter.proto.OpenChannelErrorCode error_code = 1; + if (this->error_code() != 0) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( + 1, this->error_code(), output); + } + + // string error_message = 2; + if (this->error_message().size() > 0) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String( + this->error_message().data(), static_cast(this->error_message().length()), + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SERIALIZE, + "ascend.presenter.proto.OpenChannelResponse.error_message"); + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteStringMaybeAliased( + 2, this->error_message(), output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:ascend.presenter.proto.OpenChannelResponse) +} + +::PROTOBUF_NAMESPACE_ID::uint8* OpenChannelResponse::InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const { + // @@protoc_insertion_point(serialize_to_array_start:ascend.presenter.proto.OpenChannelResponse) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // .ascend.presenter.proto.OpenChannelErrorCode error_code = 1; + if (this->error_code() != 0) { + target = 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnumToArray( + 1, this->error_code(), target); + } + + // string error_message = 2; + if (this->error_message().size() > 0) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String( + this->error_message().data(), static_cast(this->error_message().length()), + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SERIALIZE, + "ascend.presenter.proto.OpenChannelResponse.error_message"); + target = + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteStringToArray( + 2, this->error_message(), target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SerializeUnknownFieldsToArray( + _internal_metadata_.unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:ascend.presenter.proto.OpenChannelResponse) + return target; +} + +size_t OpenChannelResponse::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:ascend.presenter.proto.OpenChannelResponse) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + // string error_message = 2; + if (this->error_message().size() > 0) { + total_size += 1 + + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::StringSize( + this->error_message()); + } + + // .ascend.presenter.proto.OpenChannelErrorCode error_code = 1; + if (this->error_code() != 0) { + total_size += 1 + + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->error_code()); + } + + int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); + SetCachedSize(cached_size); + return total_size; +} + +void OpenChannelResponse::MergeFrom(const 
::PROTOBUF_NAMESPACE_ID::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:ascend.presenter.proto.OpenChannelResponse) + GOOGLE_DCHECK_NE(&from, this); + const OpenChannelResponse* source = + ::PROTOBUF_NAMESPACE_ID::DynamicCastToGenerated( + &from); + if (source == nullptr) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:ascend.presenter.proto.OpenChannelResponse) + ::PROTOBUF_NAMESPACE_ID::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:ascend.presenter.proto.OpenChannelResponse) + MergeFrom(*source); + } +} + +void OpenChannelResponse::MergeFrom(const OpenChannelResponse& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:ascend.presenter.proto.OpenChannelResponse) + GOOGLE_DCHECK_NE(&from, this); + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + if (from.error_message().size() > 0) { + + error_message_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.error_message_); + } + if (from.error_code() != 0) { + set_error_code(from.error_code()); + } +} + +void OpenChannelResponse::CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:ascend.presenter.proto.OpenChannelResponse) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void OpenChannelResponse::CopyFrom(const OpenChannelResponse& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:ascend.presenter.proto.OpenChannelResponse) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool OpenChannelResponse::IsInitialized() const { + return true; +} + +void OpenChannelResponse::Swap(OpenChannelResponse* other) { + if (other == this) return; + InternalSwap(other); +} +void OpenChannelResponse::InternalSwap(OpenChannelResponse* other) { + using std::swap; 
+ _internal_metadata_.Swap(&other->_internal_metadata_); + error_message_.Swap(&other->error_message_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); + swap(error_code_, other->error_code_); +} + +::PROTOBUF_NAMESPACE_ID::Metadata OpenChannelResponse::GetMetadata() const { + return GetMetadataStatic(); +} + + +// =================================================================== + +void HeartbeatMessage::InitAsDefaultInstance() { +} +class HeartbeatMessage::HasBitSetters { + public: +}; + +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +HeartbeatMessage::HeartbeatMessage() + : ::PROTOBUF_NAMESPACE_ID::Message(), _internal_metadata_(nullptr) { + SharedCtor(); + // @@protoc_insertion_point(constructor:ascend.presenter.proto.HeartbeatMessage) +} +HeartbeatMessage::HeartbeatMessage(const HeartbeatMessage& from) + : ::PROTOBUF_NAMESPACE_ID::Message(), + _internal_metadata_(nullptr) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + // @@protoc_insertion_point(copy_constructor:ascend.presenter.proto.HeartbeatMessage) +} + +void HeartbeatMessage::SharedCtor() { +} + +HeartbeatMessage::~HeartbeatMessage() { + // @@protoc_insertion_point(destructor:ascend.presenter.proto.HeartbeatMessage) + SharedDtor(); +} + +void HeartbeatMessage::SharedDtor() { +} + +void HeartbeatMessage::SetCachedSize(int size) const { + _cached_size_.Set(size); +} +const HeartbeatMessage& HeartbeatMessage::default_instance() { + ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_HeartbeatMessage_presenter_5fmessage_2eproto.base); + return *internal_default_instance(); +} + + +void HeartbeatMessage::Clear() { +// @@protoc_insertion_point(message_clear_start:ascend.presenter.proto.HeartbeatMessage) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + _internal_metadata_.Clear(); +} + +#if 
GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +const char* HeartbeatMessage::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { +#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure + while (!ctx->Done(&ptr)) { + ::PROTOBUF_NAMESPACE_ID::uint32 tag; + ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); + CHK_(ptr); + switch (tag >> 3) { + default: { + if ((tag & 7) == 4 || tag == 0) { + ctx->SetLastTag(tag); + goto success; + } + ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); + CHK_(ptr != nullptr); + continue; + } + } // switch + } // while +success: + return ptr; +failure: + ptr = nullptr; + goto success; +#undef CHK_ +} +#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +bool HeartbeatMessage::MergePartialFromCodedStream( + ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure + ::PROTOBUF_NAMESPACE_ID::uint32 tag; + // @@protoc_insertion_point(parse_start:ascend.presenter.proto.HeartbeatMessage) + for (;;) { + ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + handle_unusual: + if (tag == 0) { + goto success; + } + DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + } +success: + // @@protoc_insertion_point(parse_success:ascend.presenter.proto.HeartbeatMessage) + return true; +failure: + // @@protoc_insertion_point(parse_failure:ascend.presenter.proto.HeartbeatMessage) + return false; +#undef DO_ +} +#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + +void HeartbeatMessage::SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:ascend.presenter.proto.HeartbeatMessage) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + if 
(_internal_metadata_.have_unknown_fields()) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:ascend.presenter.proto.HeartbeatMessage) +} + +::PROTOBUF_NAMESPACE_ID::uint8* HeartbeatMessage::InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const { + // @@protoc_insertion_point(serialize_to_array_start:ascend.presenter.proto.HeartbeatMessage) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + if (_internal_metadata_.have_unknown_fields()) { + target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SerializeUnknownFieldsToArray( + _internal_metadata_.unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:ascend.presenter.proto.HeartbeatMessage) + return target; +} + +size_t HeartbeatMessage::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:ascend.presenter.proto.HeartbeatMessage) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); + SetCachedSize(cached_size); + return total_size; +} + +void HeartbeatMessage::MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:ascend.presenter.proto.HeartbeatMessage) + GOOGLE_DCHECK_NE(&from, this); + const HeartbeatMessage* source = + ::PROTOBUF_NAMESPACE_ID::DynamicCastToGenerated( + &from); + if (source == nullptr) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:ascend.presenter.proto.HeartbeatMessage) + 
::PROTOBUF_NAMESPACE_ID::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:ascend.presenter.proto.HeartbeatMessage) + MergeFrom(*source); + } +} + +void HeartbeatMessage::MergeFrom(const HeartbeatMessage& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:ascend.presenter.proto.HeartbeatMessage) + GOOGLE_DCHECK_NE(&from, this); + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) cached_has_bits; + +} + +void HeartbeatMessage::CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:ascend.presenter.proto.HeartbeatMessage) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void HeartbeatMessage::CopyFrom(const HeartbeatMessage& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:ascend.presenter.proto.HeartbeatMessage) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool HeartbeatMessage::IsInitialized() const { + return true; +} + +void HeartbeatMessage::Swap(HeartbeatMessage* other) { + if (other == this) return; + InternalSwap(other); +} +void HeartbeatMessage::InternalSwap(HeartbeatMessage* other) { + using std::swap; + _internal_metadata_.Swap(&other->_internal_metadata_); +} + +::PROTOBUF_NAMESPACE_ID::Metadata HeartbeatMessage::GetMetadata() const { + return GetMetadataStatic(); +} + + +// =================================================================== + +void Coordinate::InitAsDefaultInstance() { +} +class Coordinate::HasBitSetters { + public: +}; + +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int Coordinate::kXFieldNumber; +const int Coordinate::kYFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +Coordinate::Coordinate() + : ::PROTOBUF_NAMESPACE_ID::Message(), _internal_metadata_(nullptr) { + SharedCtor(); + // 
@@protoc_insertion_point(constructor:ascend.presenter.proto.Coordinate) +} +Coordinate::Coordinate(const Coordinate& from) + : ::PROTOBUF_NAMESPACE_ID::Message(), + _internal_metadata_(nullptr) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::memcpy(&x_, &from.x_, + static_cast(reinterpret_cast(&y_) - + reinterpret_cast(&x_)) + sizeof(y_)); + // @@protoc_insertion_point(copy_constructor:ascend.presenter.proto.Coordinate) +} + +void Coordinate::SharedCtor() { + ::memset(&x_, 0, static_cast( + reinterpret_cast(&y_) - + reinterpret_cast(&x_)) + sizeof(y_)); +} + +Coordinate::~Coordinate() { + // @@protoc_insertion_point(destructor:ascend.presenter.proto.Coordinate) + SharedDtor(); +} + +void Coordinate::SharedDtor() { +} + +void Coordinate::SetCachedSize(int size) const { + _cached_size_.Set(size); +} +const Coordinate& Coordinate::default_instance() { + ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_Coordinate_presenter_5fmessage_2eproto.base); + return *internal_default_instance(); +} + + +void Coordinate::Clear() { +// @@protoc_insertion_point(message_clear_start:ascend.presenter.proto.Coordinate) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + ::memset(&x_, 0, static_cast( + reinterpret_cast(&y_) - + reinterpret_cast(&x_)) + sizeof(y_)); + _internal_metadata_.Clear(); +} + +#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +const char* Coordinate::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { +#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure + while (!ctx->Done(&ptr)) { + ::PROTOBUF_NAMESPACE_ID::uint32 tag; + ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); + CHK_(ptr); + switch (tag >> 3) { + // uint32 x = 1; + case 1: + if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { + x_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); + 
CHK_(ptr); + } else goto handle_unusual; + continue; + // uint32 y = 2; + case 2: + if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { + y_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); + CHK_(ptr); + } else goto handle_unusual; + continue; + default: { + handle_unusual: + if ((tag & 7) == 4 || tag == 0) { + ctx->SetLastTag(tag); + goto success; + } + ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); + CHK_(ptr != nullptr); + continue; + } + } // switch + } // while +success: + return ptr; +failure: + ptr = nullptr; + goto success; +#undef CHK_ +} +#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +bool Coordinate::MergePartialFromCodedStream( + ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure + ::PROTOBUF_NAMESPACE_ID::uint32 tag; + // @@protoc_insertion_point(parse_start:ascend.presenter.proto.Coordinate) + for (;;) { + ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // uint32 x = 1; + case 1: { + if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { + + DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< + ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( + input, &x_))); + } else { + goto handle_unusual; + } + break; + } + + // uint32 y = 2; + case 2: { + if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { + + DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< + ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( + input, &y_))); + } else { + goto handle_unusual; + } + break; + } + + default: { + handle_unusual: + if (tag == 0) { + goto success; + } + 
DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:ascend.presenter.proto.Coordinate) + return true; +failure: + // @@protoc_insertion_point(parse_failure:ascend.presenter.proto.Coordinate) + return false; +#undef DO_ +} +#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + +void Coordinate::SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:ascend.presenter.proto.Coordinate) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // uint32 x = 1; + if (this->x() != 0) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(1, this->x(), output); + } + + // uint32 y = 2; + if (this->y() != 0) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->y(), output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:ascend.presenter.proto.Coordinate) +} + +::PROTOBUF_NAMESPACE_ID::uint8* Coordinate::InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const { + // @@protoc_insertion_point(serialize_to_array_start:ascend.presenter.proto.Coordinate) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // uint32 x = 1; + if (this->x() != 0) { + target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32ToArray(1, this->x(), target); + } + + // uint32 y = 2; + if (this->y() != 0) { + target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32ToArray(2, this->y(), target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SerializeUnknownFieldsToArray( + 
_internal_metadata_.unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:ascend.presenter.proto.Coordinate) + return target; +} + +size_t Coordinate::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:ascend.presenter.proto.Coordinate) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + // uint32 x = 1; + if (this->x() != 0) { + total_size += 1 + + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( + this->x()); + } + + // uint32 y = 2; + if (this->y() != 0) { + total_size += 1 + + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( + this->y()); + } + + int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); + SetCachedSize(cached_size); + return total_size; +} + +void Coordinate::MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:ascend.presenter.proto.Coordinate) + GOOGLE_DCHECK_NE(&from, this); + const Coordinate* source = + ::PROTOBUF_NAMESPACE_ID::DynamicCastToGenerated( + &from); + if (source == nullptr) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:ascend.presenter.proto.Coordinate) + ::PROTOBUF_NAMESPACE_ID::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:ascend.presenter.proto.Coordinate) + MergeFrom(*source); + } +} + +void Coordinate::MergeFrom(const Coordinate& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:ascend.presenter.proto.Coordinate) + GOOGLE_DCHECK_NE(&from, this); + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::PROTOBUF_NAMESPACE_ID::uint32 
cached_has_bits = 0; + (void) cached_has_bits; + + if (from.x() != 0) { + set_x(from.x()); + } + if (from.y() != 0) { + set_y(from.y()); + } +} + +void Coordinate::CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:ascend.presenter.proto.Coordinate) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void Coordinate::CopyFrom(const Coordinate& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:ascend.presenter.proto.Coordinate) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool Coordinate::IsInitialized() const { + return true; +} + +void Coordinate::Swap(Coordinate* other) { + if (other == this) return; + InternalSwap(other); +} +void Coordinate::InternalSwap(Coordinate* other) { + using std::swap; + _internal_metadata_.Swap(&other->_internal_metadata_); + swap(x_, other->x_); + swap(y_, other->y_); +} + +::PROTOBUF_NAMESPACE_ID::Metadata Coordinate::GetMetadata() const { + return GetMetadataStatic(); +} + + +// =================================================================== + +void Rectangle_Attr::InitAsDefaultInstance() { + ::ascend::presenter::proto::_Rectangle_Attr_default_instance_._instance.get_mutable()->left_top_ = const_cast< ::ascend::presenter::proto::Coordinate*>( + ::ascend::presenter::proto::Coordinate::internal_default_instance()); + ::ascend::presenter::proto::_Rectangle_Attr_default_instance_._instance.get_mutable()->right_bottom_ = const_cast< ::ascend::presenter::proto::Coordinate*>( + ::ascend::presenter::proto::Coordinate::internal_default_instance()); +} +class Rectangle_Attr::HasBitSetters { + public: + static const ::ascend::presenter::proto::Coordinate& left_top(const Rectangle_Attr* msg); + static const ::ascend::presenter::proto::Coordinate& right_bottom(const Rectangle_Attr* msg); +}; + +const ::ascend::presenter::proto::Coordinate& +Rectangle_Attr::HasBitSetters::left_top(const Rectangle_Attr* msg) { + return 
*msg->left_top_; +} +const ::ascend::presenter::proto::Coordinate& +Rectangle_Attr::HasBitSetters::right_bottom(const Rectangle_Attr* msg) { + return *msg->right_bottom_; +} +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int Rectangle_Attr::kLeftTopFieldNumber; +const int Rectangle_Attr::kRightBottomFieldNumber; +const int Rectangle_Attr::kLabelTextFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +Rectangle_Attr::Rectangle_Attr() + : ::PROTOBUF_NAMESPACE_ID::Message(), _internal_metadata_(nullptr) { + SharedCtor(); + // @@protoc_insertion_point(constructor:ascend.presenter.proto.Rectangle_Attr) +} +Rectangle_Attr::Rectangle_Attr(const Rectangle_Attr& from) + : ::PROTOBUF_NAMESPACE_ID::Message(), + _internal_metadata_(nullptr) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + label_text_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); + if (from.label_text().size() > 0) { + label_text_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.label_text_); + } + if (from.has_left_top()) { + left_top_ = new ::ascend::presenter::proto::Coordinate(*from.left_top_); + } else { + left_top_ = nullptr; + } + if (from.has_right_bottom()) { + right_bottom_ = new ::ascend::presenter::proto::Coordinate(*from.right_bottom_); + } else { + right_bottom_ = nullptr; + } + // @@protoc_insertion_point(copy_constructor:ascend.presenter.proto.Rectangle_Attr) +} + +void Rectangle_Attr::SharedCtor() { + ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_Rectangle_Attr_presenter_5fmessage_2eproto.base); + label_text_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); + ::memset(&left_top_, 0, static_cast( + reinterpret_cast(&right_bottom_) - + reinterpret_cast(&left_top_)) + sizeof(right_bottom_)); +} + +Rectangle_Attr::~Rectangle_Attr() { + // @@protoc_insertion_point(destructor:ascend.presenter.proto.Rectangle_Attr) + SharedDtor(); +} + +void 
Rectangle_Attr::SharedDtor() { + label_text_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); + if (this != internal_default_instance()) delete left_top_; + if (this != internal_default_instance()) delete right_bottom_; +} + +void Rectangle_Attr::SetCachedSize(int size) const { + _cached_size_.Set(size); +} +const Rectangle_Attr& Rectangle_Attr::default_instance() { + ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_Rectangle_Attr_presenter_5fmessage_2eproto.base); + return *internal_default_instance(); +} + + +void Rectangle_Attr::Clear() { +// @@protoc_insertion_point(message_clear_start:ascend.presenter.proto.Rectangle_Attr) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + label_text_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); + if (GetArenaNoVirtual() == nullptr && left_top_ != nullptr) { + delete left_top_; + } + left_top_ = nullptr; + if (GetArenaNoVirtual() == nullptr && right_bottom_ != nullptr) { + delete right_bottom_; + } + right_bottom_ = nullptr; + _internal_metadata_.Clear(); +} + +#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +const char* Rectangle_Attr::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { +#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure + while (!ctx->Done(&ptr)) { + ::PROTOBUF_NAMESPACE_ID::uint32 tag; + ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); + CHK_(ptr); + switch (tag >> 3) { + // .ascend.presenter.proto.Coordinate left_top = 1; + case 1: + if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 10)) { + ptr = ctx->ParseMessage(mutable_left_top(), ptr); + CHK_(ptr); + } else goto handle_unusual; + continue; + // .ascend.presenter.proto.Coordinate right_bottom = 2; + case 2: + if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 
18)) { + ptr = ctx->ParseMessage(mutable_right_bottom(), ptr); + CHK_(ptr); + } else goto handle_unusual; + continue; + // string label_text = 3; + case 3: + if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 26)) { + ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParserUTF8(mutable_label_text(), ptr, ctx, "ascend.presenter.proto.Rectangle_Attr.label_text"); + CHK_(ptr); + } else goto handle_unusual; + continue; + default: { + handle_unusual: + if ((tag & 7) == 4 || tag == 0) { + ctx->SetLastTag(tag); + goto success; + } + ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); + CHK_(ptr != nullptr); + continue; + } + } // switch + } // while +success: + return ptr; +failure: + ptr = nullptr; + goto success; +#undef CHK_ +} +#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +bool Rectangle_Attr::MergePartialFromCodedStream( + ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure + ::PROTOBUF_NAMESPACE_ID::uint32 tag; + // @@protoc_insertion_point(parse_start:ascend.presenter.proto.Rectangle_Attr) + for (;;) { + ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // .ascend.presenter.proto.Coordinate left_top = 1; + case 1: { + if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (10 & 0xFF)) { + DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( + input, mutable_left_top())); + } else { + goto handle_unusual; + } + break; + } + + // .ascend.presenter.proto.Coordinate right_bottom = 2; + case 2: { + if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (18 & 0xFF)) { + DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( + input, mutable_right_bottom())); + } else { + goto handle_unusual; + } + break; + } + + // 
string label_text = 3; + case 3: { + if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (26 & 0xFF)) { + DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadString( + input, this->mutable_label_text())); + DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String( + this->label_text().data(), static_cast(this->label_text().length()), + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::PARSE, + "ascend.presenter.proto.Rectangle_Attr.label_text")); + } else { + goto handle_unusual; + } + break; + } + + default: { + handle_unusual: + if (tag == 0) { + goto success; + } + DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:ascend.presenter.proto.Rectangle_Attr) + return true; +failure: + // @@protoc_insertion_point(parse_failure:ascend.presenter.proto.Rectangle_Attr) + return false; +#undef DO_ +} +#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + +void Rectangle_Attr::SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:ascend.presenter.proto.Rectangle_Attr) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // .ascend.presenter.proto.Coordinate left_top = 1; + if (this->has_left_top()) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessageMaybeToArray( + 1, HasBitSetters::left_top(this), output); + } + + // .ascend.presenter.proto.Coordinate right_bottom = 2; + if (this->has_right_bottom()) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessageMaybeToArray( + 2, HasBitSetters::right_bottom(this), output); + } + + // string label_text = 3; + if (this->label_text().size() > 0) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String( + this->label_text().data(), static_cast(this->label_text().length()), + 
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SERIALIZE, + "ascend.presenter.proto.Rectangle_Attr.label_text"); + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteStringMaybeAliased( + 3, this->label_text(), output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:ascend.presenter.proto.Rectangle_Attr) +} + +::PROTOBUF_NAMESPACE_ID::uint8* Rectangle_Attr::InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const { + // @@protoc_insertion_point(serialize_to_array_start:ascend.presenter.proto.Rectangle_Attr) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // .ascend.presenter.proto.Coordinate left_top = 1; + if (this->has_left_top()) { + target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite:: + InternalWriteMessageToArray( + 1, HasBitSetters::left_top(this), target); + } + + // .ascend.presenter.proto.Coordinate right_bottom = 2; + if (this->has_right_bottom()) { + target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite:: + InternalWriteMessageToArray( + 2, HasBitSetters::right_bottom(this), target); + } + + // string label_text = 3; + if (this->label_text().size() > 0) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String( + this->label_text().data(), static_cast(this->label_text().length()), + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SERIALIZE, + "ascend.presenter.proto.Rectangle_Attr.label_text"); + target = + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteStringToArray( + 3, this->label_text(), target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SerializeUnknownFieldsToArray( + _internal_metadata_.unknown_fields(), target); + } + // 
@@protoc_insertion_point(serialize_to_array_end:ascend.presenter.proto.Rectangle_Attr) + return target; +} + +size_t Rectangle_Attr::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:ascend.presenter.proto.Rectangle_Attr) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + // string label_text = 3; + if (this->label_text().size() > 0) { + total_size += 1 + + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::StringSize( + this->label_text()); + } + + // .ascend.presenter.proto.Coordinate left_top = 1; + if (this->has_left_top()) { + total_size += 1 + + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( + *left_top_); + } + + // .ascend.presenter.proto.Coordinate right_bottom = 2; + if (this->has_right_bottom()) { + total_size += 1 + + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( + *right_bottom_); + } + + int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); + SetCachedSize(cached_size); + return total_size; +} + +void Rectangle_Attr::MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:ascend.presenter.proto.Rectangle_Attr) + GOOGLE_DCHECK_NE(&from, this); + const Rectangle_Attr* source = + ::PROTOBUF_NAMESPACE_ID::DynamicCastToGenerated( + &from); + if (source == nullptr) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:ascend.presenter.proto.Rectangle_Attr) + ::PROTOBUF_NAMESPACE_ID::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:ascend.presenter.proto.Rectangle_Attr) + MergeFrom(*source); + } +} + +void 
Rectangle_Attr::MergeFrom(const Rectangle_Attr& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:ascend.presenter.proto.Rectangle_Attr) + GOOGLE_DCHECK_NE(&from, this); + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + if (from.label_text().size() > 0) { + + label_text_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.label_text_); + } + if (from.has_left_top()) { + mutable_left_top()->::ascend::presenter::proto::Coordinate::MergeFrom(from.left_top()); + } + if (from.has_right_bottom()) { + mutable_right_bottom()->::ascend::presenter::proto::Coordinate::MergeFrom(from.right_bottom()); + } +} + +void Rectangle_Attr::CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:ascend.presenter.proto.Rectangle_Attr) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void Rectangle_Attr::CopyFrom(const Rectangle_Attr& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:ascend.presenter.proto.Rectangle_Attr) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool Rectangle_Attr::IsInitialized() const { + return true; +} + +void Rectangle_Attr::Swap(Rectangle_Attr* other) { + if (other == this) return; + InternalSwap(other); +} +void Rectangle_Attr::InternalSwap(Rectangle_Attr* other) { + using std::swap; + _internal_metadata_.Swap(&other->_internal_metadata_); + label_text_.Swap(&other->label_text_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); + swap(left_top_, other->left_top_); + swap(right_bottom_, other->right_bottom_); +} + +::PROTOBUF_NAMESPACE_ID::Metadata Rectangle_Attr::GetMetadata() const { + return GetMetadataStatic(); +} + + +// =================================================================== + +void PresentImageRequest::InitAsDefaultInstance() { +} +class 
PresentImageRequest::HasBitSetters { + public: +}; + +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int PresentImageRequest::kFormatFieldNumber; +const int PresentImageRequest::kWidthFieldNumber; +const int PresentImageRequest::kHeightFieldNumber; +const int PresentImageRequest::kDataFieldNumber; +const int PresentImageRequest::kRectangleListFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +PresentImageRequest::PresentImageRequest() + : ::PROTOBUF_NAMESPACE_ID::Message(), _internal_metadata_(nullptr) { + SharedCtor(); + // @@protoc_insertion_point(constructor:ascend.presenter.proto.PresentImageRequest) +} +PresentImageRequest::PresentImageRequest(const PresentImageRequest& from) + : ::PROTOBUF_NAMESPACE_ID::Message(), + _internal_metadata_(nullptr), + rectangle_list_(from.rectangle_list_) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + data_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); + if (from.data().size() > 0) { + data_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.data_); + } + ::memcpy(&format_, &from.format_, + static_cast(reinterpret_cast(&height_) - + reinterpret_cast(&format_)) + sizeof(height_)); + // @@protoc_insertion_point(copy_constructor:ascend.presenter.proto.PresentImageRequest) +} + +void PresentImageRequest::SharedCtor() { + ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_PresentImageRequest_presenter_5fmessage_2eproto.base); + data_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); + ::memset(&format_, 0, static_cast( + reinterpret_cast(&height_) - + reinterpret_cast(&format_)) + sizeof(height_)); +} + +PresentImageRequest::~PresentImageRequest() { + // @@protoc_insertion_point(destructor:ascend.presenter.proto.PresentImageRequest) + SharedDtor(); +} + +void PresentImageRequest::SharedDtor() { + data_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} + 
+void PresentImageRequest::SetCachedSize(int size) const { + _cached_size_.Set(size); +} +const PresentImageRequest& PresentImageRequest::default_instance() { + ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_PresentImageRequest_presenter_5fmessage_2eproto.base); + return *internal_default_instance(); +} + + +void PresentImageRequest::Clear() { +// @@protoc_insertion_point(message_clear_start:ascend.presenter.proto.PresentImageRequest) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + rectangle_list_.Clear(); + data_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); + ::memset(&format_, 0, static_cast( + reinterpret_cast(&height_) - + reinterpret_cast(&format_)) + sizeof(height_)); + _internal_metadata_.Clear(); +} + +#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +const char* PresentImageRequest::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { +#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure + while (!ctx->Done(&ptr)) { + ::PROTOBUF_NAMESPACE_ID::uint32 tag; + ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); + CHK_(ptr); + switch (tag >> 3) { + // .ascend.presenter.proto.ImageFormat format = 1; + case 1: + if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { + ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); + CHK_(ptr); + set_format(static_cast<::ascend::presenter::proto::ImageFormat>(val)); + } else goto handle_unusual; + continue; + // uint32 width = 2; + case 2: + if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 16)) { + width_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); + CHK_(ptr); + } else goto handle_unusual; + continue; + // uint32 height = 3; + case 3: + if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 
24)) { + height_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); + CHK_(ptr); + } else goto handle_unusual; + continue; + // bytes data = 4; + case 4: + if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 34)) { + ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParser(mutable_data(), ptr, ctx); + CHK_(ptr); + } else goto handle_unusual; + continue; + // repeated .ascend.presenter.proto.Rectangle_Attr rectangle_list = 5; + case 5: + if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 42)) { + ptr -= 1; + do { + ptr += 1; + ptr = ctx->ParseMessage(add_rectangle_list(), ptr); + CHK_(ptr); + if (!ctx->DataAvailable(ptr)) break; + } while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint8>(ptr) == 42); + } else goto handle_unusual; + continue; + default: { + handle_unusual: + if ((tag & 7) == 4 || tag == 0) { + ctx->SetLastTag(tag); + goto success; + } + ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); + CHK_(ptr != nullptr); + continue; + } + } // switch + } // while +success: + return ptr; +failure: + ptr = nullptr; + goto success; +#undef CHK_ +} +#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +bool PresentImageRequest::MergePartialFromCodedStream( + ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure + ::PROTOBUF_NAMESPACE_ID::uint32 tag; + // @@protoc_insertion_point(parse_start:ascend.presenter.proto.PresentImageRequest) + for (;;) { + ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // .ascend.presenter.proto.ImageFormat format = 1; + case 1: { + if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { + int value = 0; + 
DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< + int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( + input, &value))); + set_format(static_cast< ::ascend::presenter::proto::ImageFormat >(value)); + } else { + goto handle_unusual; + } + break; + } + + // uint32 width = 2; + case 2: { + if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (16 & 0xFF)) { + + DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< + ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( + input, &width_))); + } else { + goto handle_unusual; + } + break; + } + + // uint32 height = 3; + case 3: { + if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (24 & 0xFF)) { + + DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< + ::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>( + input, &height_))); + } else { + goto handle_unusual; + } + break; + } + + // bytes data = 4; + case 4: { + if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (34 & 0xFF)) { + DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadBytes( + input, this->mutable_data())); + } else { + goto handle_unusual; + } + break; + } + + // repeated .ascend.presenter.proto.Rectangle_Attr rectangle_list = 5; + case 5: { + if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (42 & 0xFF)) { + DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadMessage( + input, add_rectangle_list())); + } else { + goto handle_unusual; + } + break; + } + + default: { + handle_unusual: + if (tag == 0) { + goto success; + } + DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:ascend.presenter.proto.PresentImageRequest) + return true; +failure: + // 
@@protoc_insertion_point(parse_failure:ascend.presenter.proto.PresentImageRequest) + return false; +#undef DO_ +} +#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + +void PresentImageRequest::SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:ascend.presenter.proto.PresentImageRequest) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // .ascend.presenter.proto.ImageFormat format = 1; + if (this->format() != 0) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( + 1, this->format(), output); + } + + // uint32 width = 2; + if (this->width() != 0) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(2, this->width(), output); + } + + // uint32 height = 3; + if (this->height() != 0) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32(3, this->height(), output); + } + + // bytes data = 4; + if (this->data().size() > 0) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesMaybeAliased( + 4, this->data(), output); + } + + // repeated .ascend.presenter.proto.Rectangle_Attr rectangle_list = 5; + for (unsigned int i = 0, + n = static_cast(this->rectangle_list_size()); i < n; i++) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteMessageMaybeToArray( + 5, + this->rectangle_list(static_cast(i)), + output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:ascend.presenter.proto.PresentImageRequest) +} + +::PROTOBUF_NAMESPACE_ID::uint8* PresentImageRequest::InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const { + // @@protoc_insertion_point(serialize_to_array_start:ascend.presenter.proto.PresentImageRequest) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) 
cached_has_bits; + + // .ascend.presenter.proto.ImageFormat format = 1; + if (this->format() != 0) { + target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnumToArray( + 1, this->format(), target); + } + + // uint32 width = 2; + if (this->width() != 0) { + target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32ToArray(2, this->width(), target); + } + + // uint32 height = 3; + if (this->height() != 0) { + target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32ToArray(3, this->height(), target); + } + + // bytes data = 4; + if (this->data().size() > 0) { + target = + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteBytesToArray( + 4, this->data(), target); + } + + // repeated .ascend.presenter.proto.Rectangle_Attr rectangle_list = 5; + for (unsigned int i = 0, + n = static_cast(this->rectangle_list_size()); i < n; i++) { + target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite:: + InternalWriteMessageToArray( + 5, this->rectangle_list(static_cast(i)), target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SerializeUnknownFieldsToArray( + _internal_metadata_.unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:ascend.presenter.proto.PresentImageRequest) + return target; +} + +size_t PresentImageRequest::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:ascend.presenter.proto.PresentImageRequest) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + // repeated .ascend.presenter.proto.Rectangle_Attr rectangle_list = 5; + { + unsigned int count = 
static_cast(this->rectangle_list_size()); + total_size += 1UL * count; + for (unsigned int i = 0; i < count; i++) { + total_size += + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize( + this->rectangle_list(static_cast(i))); + } + } + + // bytes data = 4; + if (this->data().size() > 0) { + total_size += 1 + + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize( + this->data()); + } + + // .ascend.presenter.proto.ImageFormat format = 1; + if (this->format() != 0) { + total_size += 1 + + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->format()); + } + + // uint32 width = 2; + if (this->width() != 0) { + total_size += 1 + + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( + this->width()); + } + + // uint32 height = 3; + if (this->height() != 0) { + total_size += 1 + + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::UInt32Size( + this->height()); + } + + int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); + SetCachedSize(cached_size); + return total_size; +} + +void PresentImageRequest::MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:ascend.presenter.proto.PresentImageRequest) + GOOGLE_DCHECK_NE(&from, this); + const PresentImageRequest* source = + ::PROTOBUF_NAMESPACE_ID::DynamicCastToGenerated( + &from); + if (source == nullptr) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:ascend.presenter.proto.PresentImageRequest) + ::PROTOBUF_NAMESPACE_ID::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:ascend.presenter.proto.PresentImageRequest) + MergeFrom(*source); + } +} + +void PresentImageRequest::MergeFrom(const PresentImageRequest& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:ascend.presenter.proto.PresentImageRequest) + GOOGLE_DCHECK_NE(&from, this); + 
_internal_metadata_.MergeFrom(from._internal_metadata_); + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + rectangle_list_.MergeFrom(from.rectangle_list_); + if (from.data().size() > 0) { + + data_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.data_); + } + if (from.format() != 0) { + set_format(from.format()); + } + if (from.width() != 0) { + set_width(from.width()); + } + if (from.height() != 0) { + set_height(from.height()); + } +} + +void PresentImageRequest::CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:ascend.presenter.proto.PresentImageRequest) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void PresentImageRequest::CopyFrom(const PresentImageRequest& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:ascend.presenter.proto.PresentImageRequest) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool PresentImageRequest::IsInitialized() const { + return true; +} + +void PresentImageRequest::Swap(PresentImageRequest* other) { + if (other == this) return; + InternalSwap(other); +} +void PresentImageRequest::InternalSwap(PresentImageRequest* other) { + using std::swap; + _internal_metadata_.Swap(&other->_internal_metadata_); + CastToBase(&rectangle_list_)->InternalSwap(CastToBase(&other->rectangle_list_)); + data_.Swap(&other->data_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); + swap(format_, other->format_); + swap(width_, other->width_); + swap(height_, other->height_); +} + +::PROTOBUF_NAMESPACE_ID::Metadata PresentImageRequest::GetMetadata() const { + return GetMetadataStatic(); +} + + +// =================================================================== + +void PresentImageResponse::InitAsDefaultInstance() { +} +class PresentImageResponse::HasBitSetters { + public: +}; + +#if !defined(_MSC_VER) || _MSC_VER >= 1900 
+const int PresentImageResponse::kErrorCodeFieldNumber; +const int PresentImageResponse::kErrorMessageFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +PresentImageResponse::PresentImageResponse() + : ::PROTOBUF_NAMESPACE_ID::Message(), _internal_metadata_(nullptr) { + SharedCtor(); + // @@protoc_insertion_point(constructor:ascend.presenter.proto.PresentImageResponse) +} +PresentImageResponse::PresentImageResponse(const PresentImageResponse& from) + : ::PROTOBUF_NAMESPACE_ID::Message(), + _internal_metadata_(nullptr) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + error_message_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); + if (from.error_message().size() > 0) { + error_message_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.error_message_); + } + error_code_ = from.error_code_; + // @@protoc_insertion_point(copy_constructor:ascend.presenter.proto.PresentImageResponse) +} + +void PresentImageResponse::SharedCtor() { + ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&scc_info_PresentImageResponse_presenter_5fmessage_2eproto.base); + error_message_.UnsafeSetDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); + error_code_ = 0; +} + +PresentImageResponse::~PresentImageResponse() { + // @@protoc_insertion_point(destructor:ascend.presenter.proto.PresentImageResponse) + SharedDtor(); +} + +void PresentImageResponse::SharedDtor() { + error_message_.DestroyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} + +void PresentImageResponse::SetCachedSize(int size) const { + _cached_size_.Set(size); +} +const PresentImageResponse& PresentImageResponse::default_instance() { + ::PROTOBUF_NAMESPACE_ID::internal::InitSCC(&::scc_info_PresentImageResponse_presenter_5fmessage_2eproto.base); + return *internal_default_instance(); +} + + +void PresentImageResponse::Clear() { +// 
@@protoc_insertion_point(message_clear_start:ascend.presenter.proto.PresentImageResponse) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + error_message_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); + error_code_ = 0; + _internal_metadata_.Clear(); +} + +#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +const char* PresentImageResponse::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) { +#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure + while (!ctx->Done(&ptr)) { + ::PROTOBUF_NAMESPACE_ID::uint32 tag; + ptr = ::PROTOBUF_NAMESPACE_ID::internal::ReadTag(ptr, &tag); + CHK_(ptr); + switch (tag >> 3) { + // .ascend.presenter.proto.PresentDataErrorCode error_code = 1; + case 1: + if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 8)) { + ::PROTOBUF_NAMESPACE_ID::uint64 val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr); + CHK_(ptr); + set_error_code(static_cast<::ascend::presenter::proto::PresentDataErrorCode>(val)); + } else goto handle_unusual; + continue; + // string error_message = 2; + case 2: + if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 18)) { + ptr = ::PROTOBUF_NAMESPACE_ID::internal::InlineGreedyStringParserUTF8(mutable_error_message(), ptr, ctx, "ascend.presenter.proto.PresentImageResponse.error_message"); + CHK_(ptr); + } else goto handle_unusual; + continue; + default: { + handle_unusual: + if ((tag & 7) == 4 || tag == 0) { + ctx->SetLastTag(tag); + goto success; + } + ptr = UnknownFieldParse(tag, &_internal_metadata_, ptr, ctx); + CHK_(ptr != nullptr); + continue; + } + } // switch + } // while +success: + return ptr; +failure: + ptr = nullptr; + goto success; +#undef CHK_ +} +#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +bool PresentImageResponse::MergePartialFromCodedStream( + 
::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure + ::PROTOBUF_NAMESPACE_ID::uint32 tag; + // @@protoc_insertion_point(parse_start:ascend.presenter.proto.PresentImageResponse) + for (;;) { + ::std::pair<::PROTOBUF_NAMESPACE_ID::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + switch (::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // .ascend.presenter.proto.PresentDataErrorCode error_code = 1; + case 1: { + if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (8 & 0xFF)) { + int value = 0; + DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPrimitive< + int, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_ENUM>( + input, &value))); + set_error_code(static_cast< ::ascend::presenter::proto::PresentDataErrorCode >(value)); + } else { + goto handle_unusual; + } + break; + } + + // string error_message = 2; + case 2: { + if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (18 & 0xFF)) { + DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadString( + input, this->mutable_error_message())); + DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String( + this->error_message().data(), static_cast(this->error_message().length()), + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::PARSE, + "ascend.presenter.proto.PresentImageResponse.error_message")); + } else { + goto handle_unusual; + } + break; + } + + default: { + handle_unusual: + if (tag == 0) { + goto success; + } + DO_(::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:ascend.presenter.proto.PresentImageResponse) + return true; +failure: + // @@protoc_insertion_point(parse_failure:ascend.presenter.proto.PresentImageResponse) + return false; +#undef 
DO_ +} +#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + +void PresentImageResponse::SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:ascend.presenter.proto.PresentImageResponse) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // .ascend.presenter.proto.PresentDataErrorCode error_code = 1; + if (this->error_code() != 0) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnum( + 1, this->error_code(), output); + } + + // string error_message = 2; + if (this->error_message().size() > 0) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String( + this->error_message().data(), static_cast(this->error_message().length()), + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SERIALIZE, + "ascend.presenter.proto.PresentImageResponse.error_message"); + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteStringMaybeAliased( + 2, this->error_message(), output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:ascend.presenter.proto.PresentImageResponse) +} + +::PROTOBUF_NAMESPACE_ID::uint8* PresentImageResponse::InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const { + // @@protoc_insertion_point(serialize_to_array_start:ascend.presenter.proto.PresentImageResponse) + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // .ascend.presenter.proto.PresentDataErrorCode error_code = 1; + if (this->error_code() != 0) { + target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteEnumToArray( + 1, this->error_code(), target); + } + + // string error_message = 2; + if (this->error_message().size() > 0) { + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String( + 
this->error_message().data(), static_cast(this->error_message().length()), + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SERIALIZE, + "ascend.presenter.proto.PresentImageResponse.error_message"); + target = + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteStringToArray( + 2, this->error_message(), target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::SerializeUnknownFieldsToArray( + _internal_metadata_.unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:ascend.presenter.proto.PresentImageResponse) + return target; +} + +size_t PresentImageResponse::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:ascend.presenter.proto.PresentImageResponse) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::PROTOBUF_NAMESPACE_ID::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + // string error_message = 2; + if (this->error_message().size() > 0) { + total_size += 1 + + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::StringSize( + this->error_message()); + } + + // .ascend.presenter.proto.PresentDataErrorCode error_code = 1; + if (this->error_code() != 0) { + total_size += 1 + + ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::EnumSize(this->error_code()); + } + + int cached_size = ::PROTOBUF_NAMESPACE_ID::internal::ToCachedSize(total_size); + SetCachedSize(cached_size); + return total_size; +} + +void PresentImageResponse::MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:ascend.presenter.proto.PresentImageResponse) + GOOGLE_DCHECK_NE(&from, this); + const PresentImageResponse* source = + 
::PROTOBUF_NAMESPACE_ID::DynamicCastToGenerated( + &from); + if (source == nullptr) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:ascend.presenter.proto.PresentImageResponse) + ::PROTOBUF_NAMESPACE_ID::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:ascend.presenter.proto.PresentImageResponse) + MergeFrom(*source); + } +} + +void PresentImageResponse::MergeFrom(const PresentImageResponse& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:ascend.presenter.proto.PresentImageResponse) + GOOGLE_DCHECK_NE(&from, this); + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::PROTOBUF_NAMESPACE_ID::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + if (from.error_message().size() > 0) { + + error_message_.AssignWithDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from.error_message_); + } + if (from.error_code() != 0) { + set_error_code(from.error_code()); + } +} + +void PresentImageResponse::CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:ascend.presenter.proto.PresentImageResponse) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void PresentImageResponse::CopyFrom(const PresentImageResponse& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:ascend.presenter.proto.PresentImageResponse) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool PresentImageResponse::IsInitialized() const { + return true; +} + +void PresentImageResponse::Swap(PresentImageResponse* other) { + if (other == this) return; + InternalSwap(other); +} +void PresentImageResponse::InternalSwap(PresentImageResponse* other) { + using std::swap; + _internal_metadata_.Swap(&other->_internal_metadata_); + error_message_.Swap(&other->error_message_, &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); + 
swap(error_code_, other->error_code_); +} + +::PROTOBUF_NAMESPACE_ID::Metadata PresentImageResponse::GetMetadata() const { + return GetMetadataStatic(); +} + + +// @@protoc_insertion_point(namespace_scope) +} // namespace proto +} // namespace presenter +} // namespace ascend +PROTOBUF_NAMESPACE_OPEN +template<> PROTOBUF_NOINLINE ::ascend::presenter::proto::OpenChannelRequest* Arena::CreateMaybeMessage< ::ascend::presenter::proto::OpenChannelRequest >(Arena* arena) { + return Arena::CreateInternal< ::ascend::presenter::proto::OpenChannelRequest >(arena); +} +template<> PROTOBUF_NOINLINE ::ascend::presenter::proto::OpenChannelResponse* Arena::CreateMaybeMessage< ::ascend::presenter::proto::OpenChannelResponse >(Arena* arena) { + return Arena::CreateInternal< ::ascend::presenter::proto::OpenChannelResponse >(arena); +} +template<> PROTOBUF_NOINLINE ::ascend::presenter::proto::HeartbeatMessage* Arena::CreateMaybeMessage< ::ascend::presenter::proto::HeartbeatMessage >(Arena* arena) { + return Arena::CreateInternal< ::ascend::presenter::proto::HeartbeatMessage >(arena); +} +template<> PROTOBUF_NOINLINE ::ascend::presenter::proto::Coordinate* Arena::CreateMaybeMessage< ::ascend::presenter::proto::Coordinate >(Arena* arena) { + return Arena::CreateInternal< ::ascend::presenter::proto::Coordinate >(arena); +} +template<> PROTOBUF_NOINLINE ::ascend::presenter::proto::Rectangle_Attr* Arena::CreateMaybeMessage< ::ascend::presenter::proto::Rectangle_Attr >(Arena* arena) { + return Arena::CreateInternal< ::ascend::presenter::proto::Rectangle_Attr >(arena); +} +template<> PROTOBUF_NOINLINE ::ascend::presenter::proto::PresentImageRequest* Arena::CreateMaybeMessage< ::ascend::presenter::proto::PresentImageRequest >(Arena* arena) { + return Arena::CreateInternal< ::ascend::presenter::proto::PresentImageRequest >(arena); +} +template<> PROTOBUF_NOINLINE ::ascend::presenter::proto::PresentImageResponse* Arena::CreateMaybeMessage< ::ascend::presenter::proto::PresentImageResponse 
>(Arena* arena) { + return Arena::CreateInternal< ::ascend::presenter::proto::PresentImageResponse >(arena); +} +PROTOBUF_NAMESPACE_CLOSE + +// @@protoc_insertion_point(global_scope) +#include diff --git a/cplusplus/common/presenteragent/proto/presenter_message.pb.h b/cplusplus/common/presenteragent/proto/presenter_message.pb.h new file mode 100644 index 0000000000000000000000000000000000000000..4da3151ecf056c14f4764545d259d40a8f72e07a --- /dev/null +++ b/cplusplus/common/presenteragent/proto/presenter_message.pb.h @@ -0,0 +1,1772 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: presenter_message.proto + +#ifndef GOOGLE_PROTOBUF_INCLUDED_presenter_5fmessage_2eproto +#define GOOGLE_PROTOBUF_INCLUDED_presenter_5fmessage_2eproto + +#include +#include + +#include +#if PROTOBUF_VERSION < 3008000 +#error This file was generated by a newer version of protoc which is +#error incompatible with your Protocol Buffer headers. Please update +#error your headers. +#endif +#if 3008000 < PROTOBUF_MIN_PROTOC_VERSION +#error This file was generated by an older version of protoc which is +#error incompatible with your Protocol Buffer headers. Please +#error regenerate this file with a newer version of protoc. +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include // IWYU pragma: export +#include // IWYU pragma: export +#include +#include +// @@protoc_insertion_point(includes) +#include +#define PROTOBUF_INTERNAL_EXPORT_presenter_5fmessage_2eproto +PROTOBUF_NAMESPACE_OPEN +namespace internal { +class AnyMetadata; +} // namespace internal +PROTOBUF_NAMESPACE_CLOSE + +// Internal implementation detail -- do not use these members. 
+struct TableStruct_presenter_5fmessage_2eproto { + static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTableField entries[] + PROTOBUF_SECTION_VARIABLE(protodesc_cold); + static const ::PROTOBUF_NAMESPACE_ID::internal::AuxillaryParseTableField aux[] + PROTOBUF_SECTION_VARIABLE(protodesc_cold); + static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTable schema[7] + PROTOBUF_SECTION_VARIABLE(protodesc_cold); + static const ::PROTOBUF_NAMESPACE_ID::internal::FieldMetadata field_metadata[]; + static const ::PROTOBUF_NAMESPACE_ID::internal::SerializationTable serialization_table[]; + static const ::PROTOBUF_NAMESPACE_ID::uint32 offsets[]; +}; +extern const ::PROTOBUF_NAMESPACE_ID::internal::DescriptorTable descriptor_table_presenter_5fmessage_2eproto; +namespace ascend { +namespace presenter { +namespace proto { +class Coordinate; +class CoordinateDefaultTypeInternal; +extern CoordinateDefaultTypeInternal _Coordinate_default_instance_; +class HeartbeatMessage; +class HeartbeatMessageDefaultTypeInternal; +extern HeartbeatMessageDefaultTypeInternal _HeartbeatMessage_default_instance_; +class OpenChannelRequest; +class OpenChannelRequestDefaultTypeInternal; +extern OpenChannelRequestDefaultTypeInternal _OpenChannelRequest_default_instance_; +class OpenChannelResponse; +class OpenChannelResponseDefaultTypeInternal; +extern OpenChannelResponseDefaultTypeInternal _OpenChannelResponse_default_instance_; +class PresentImageRequest; +class PresentImageRequestDefaultTypeInternal; +extern PresentImageRequestDefaultTypeInternal _PresentImageRequest_default_instance_; +class PresentImageResponse; +class PresentImageResponseDefaultTypeInternal; +extern PresentImageResponseDefaultTypeInternal _PresentImageResponse_default_instance_; +class Rectangle_Attr; +class Rectangle_AttrDefaultTypeInternal; +extern Rectangle_AttrDefaultTypeInternal _Rectangle_Attr_default_instance_; +} // namespace proto +} // namespace presenter +} // namespace ascend +PROTOBUF_NAMESPACE_OPEN +template<> 
::ascend::presenter::proto::Coordinate* Arena::CreateMaybeMessage<::ascend::presenter::proto::Coordinate>(Arena*); +template<> ::ascend::presenter::proto::HeartbeatMessage* Arena::CreateMaybeMessage<::ascend::presenter::proto::HeartbeatMessage>(Arena*); +template<> ::ascend::presenter::proto::OpenChannelRequest* Arena::CreateMaybeMessage<::ascend::presenter::proto::OpenChannelRequest>(Arena*); +template<> ::ascend::presenter::proto::OpenChannelResponse* Arena::CreateMaybeMessage<::ascend::presenter::proto::OpenChannelResponse>(Arena*); +template<> ::ascend::presenter::proto::PresentImageRequest* Arena::CreateMaybeMessage<::ascend::presenter::proto::PresentImageRequest>(Arena*); +template<> ::ascend::presenter::proto::PresentImageResponse* Arena::CreateMaybeMessage<::ascend::presenter::proto::PresentImageResponse>(Arena*); +template<> ::ascend::presenter::proto::Rectangle_Attr* Arena::CreateMaybeMessage<::ascend::presenter::proto::Rectangle_Attr>(Arena*); +PROTOBUF_NAMESPACE_CLOSE +namespace ascend { +namespace presenter { +namespace proto { + +enum OpenChannelErrorCode : int { + kOpenChannelErrorNone = 0, + kOpenChannelErrorNoSuchChannel = 1, + kOpenChannelErrorChannelAlreadyOpened = 2, + kOpenChannelErrorOther = -1, + OpenChannelErrorCode_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::min(), + OpenChannelErrorCode_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::max() +}; +bool OpenChannelErrorCode_IsValid(int value); +constexpr OpenChannelErrorCode OpenChannelErrorCode_MIN = kOpenChannelErrorOther; +constexpr OpenChannelErrorCode OpenChannelErrorCode_MAX = kOpenChannelErrorChannelAlreadyOpened; +constexpr int OpenChannelErrorCode_ARRAYSIZE = OpenChannelErrorCode_MAX + 1; + +const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor* OpenChannelErrorCode_descriptor(); +template +inline const std::string& OpenChannelErrorCode_Name(T enum_t_value) { + static_assert(::std::is_same::value || + 
::std::is_integral::value, + "Incorrect type passed to function OpenChannelErrorCode_Name."); + return ::PROTOBUF_NAMESPACE_ID::internal::NameOfEnum( + OpenChannelErrorCode_descriptor(), enum_t_value); +} +inline bool OpenChannelErrorCode_Parse( + const std::string& name, OpenChannelErrorCode* value) { + return ::PROTOBUF_NAMESPACE_ID::internal::ParseNamedEnum( + OpenChannelErrorCode_descriptor(), name, value); +} +enum ChannelContentType : int { + kChannelContentTypeImage = 0, + kChannelContentTypeVideo = 1, + ChannelContentType_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::min(), + ChannelContentType_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::max() +}; +bool ChannelContentType_IsValid(int value); +constexpr ChannelContentType ChannelContentType_MIN = kChannelContentTypeImage; +constexpr ChannelContentType ChannelContentType_MAX = kChannelContentTypeVideo; +constexpr int ChannelContentType_ARRAYSIZE = ChannelContentType_MAX + 1; + +const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor* ChannelContentType_descriptor(); +template +inline const std::string& ChannelContentType_Name(T enum_t_value) { + static_assert(::std::is_same::value || + ::std::is_integral::value, + "Incorrect type passed to function ChannelContentType_Name."); + return ::PROTOBUF_NAMESPACE_ID::internal::NameOfEnum( + ChannelContentType_descriptor(), enum_t_value); +} +inline bool ChannelContentType_Parse( + const std::string& name, ChannelContentType* value) { + return ::PROTOBUF_NAMESPACE_ID::internal::ParseNamedEnum( + ChannelContentType_descriptor(), name, value); +} +enum ImageFormat : int { + kImageFormatJpeg = 0, + ImageFormat_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::min(), + ImageFormat_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::max() +}; +bool ImageFormat_IsValid(int value); +constexpr ImageFormat ImageFormat_MIN = kImageFormatJpeg; 
+constexpr ImageFormat ImageFormat_MAX = kImageFormatJpeg; +constexpr int ImageFormat_ARRAYSIZE = ImageFormat_MAX + 1; + +const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor* ImageFormat_descriptor(); +template +inline const std::string& ImageFormat_Name(T enum_t_value) { + static_assert(::std::is_same::value || + ::std::is_integral::value, + "Incorrect type passed to function ImageFormat_Name."); + return ::PROTOBUF_NAMESPACE_ID::internal::NameOfEnum( + ImageFormat_descriptor(), enum_t_value); +} +inline bool ImageFormat_Parse( + const std::string& name, ImageFormat* value) { + return ::PROTOBUF_NAMESPACE_ID::internal::ParseNamedEnum( + ImageFormat_descriptor(), name, value); +} +enum PresentDataErrorCode : int { + kPresentDataErrorNone = 0, + kPresentDataErrorUnsupportedType = 1, + kPresentDataErrorUnsupportedFormat = 2, + kPresentDataErrorOther = -1, + PresentDataErrorCode_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::min(), + PresentDataErrorCode_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::max() +}; +bool PresentDataErrorCode_IsValid(int value); +constexpr PresentDataErrorCode PresentDataErrorCode_MIN = kPresentDataErrorOther; +constexpr PresentDataErrorCode PresentDataErrorCode_MAX = kPresentDataErrorUnsupportedFormat; +constexpr int PresentDataErrorCode_ARRAYSIZE = PresentDataErrorCode_MAX + 1; + +const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor* PresentDataErrorCode_descriptor(); +template +inline const std::string& PresentDataErrorCode_Name(T enum_t_value) { + static_assert(::std::is_same::value || + ::std::is_integral::value, + "Incorrect type passed to function PresentDataErrorCode_Name."); + return ::PROTOBUF_NAMESPACE_ID::internal::NameOfEnum( + PresentDataErrorCode_descriptor(), enum_t_value); +} +inline bool PresentDataErrorCode_Parse( + const std::string& name, PresentDataErrorCode* value) { + return ::PROTOBUF_NAMESPACE_ID::internal::ParseNamedEnum( + 
PresentDataErrorCode_descriptor(), name, value); +} +// =================================================================== + +class OpenChannelRequest : + public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:ascend.presenter.proto.OpenChannelRequest) */ { + public: + OpenChannelRequest(); + virtual ~OpenChannelRequest(); + + OpenChannelRequest(const OpenChannelRequest& from); + OpenChannelRequest(OpenChannelRequest&& from) noexcept + : OpenChannelRequest() { + *this = ::std::move(from); + } + + inline OpenChannelRequest& operator=(const OpenChannelRequest& from) { + CopyFrom(from); + return *this; + } + inline OpenChannelRequest& operator=(OpenChannelRequest&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() { + return GetDescriptor(); + } + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() { + return GetMetadataStatic().descriptor; + } + static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() { + return GetMetadataStatic().reflection; + } + static const OpenChannelRequest& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const OpenChannelRequest* internal_default_instance() { + return reinterpret_cast( + &_OpenChannelRequest_default_instance_); + } + static constexpr int kIndexInFileMessages = + 0; + + void Swap(OpenChannelRequest* other); + friend void swap(OpenChannelRequest& a, OpenChannelRequest& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline OpenChannelRequest* New() const final { + return CreateMaybeMessage(nullptr); + } + + OpenChannelRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) 
final; + void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void CopyFrom(const OpenChannelRequest& from); + void MergeFrom(const OpenChannelRequest& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; + #else + bool MergePartialFromCodedStream( + ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; + ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + inline void SharedCtor(); + inline void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(OpenChannelRequest* other); + friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; + static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { + return "ascend.presenter.proto.OpenChannelRequest"; + } + private: + inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final; + private: + static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() { + ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_presenter_5fmessage_2eproto); + return ::descriptor_table_presenter_5fmessage_2eproto.file_level_metadata[kIndexInFileMessages]; + } + + public: + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // string channel_name = 1; + void 
clear_channel_name(); + static const int kChannelNameFieldNumber = 1; + const std::string& channel_name() const; + void set_channel_name(const std::string& value); + void set_channel_name(std::string&& value); + void set_channel_name(const char* value); + void set_channel_name(const char* value, size_t size); + std::string* mutable_channel_name(); + std::string* release_channel_name(); + void set_allocated_channel_name(std::string* channel_name); + + // .ascend.presenter.proto.ChannelContentType content_type = 2; + void clear_content_type(); + static const int kContentTypeFieldNumber = 2; + ::ascend::presenter::proto::ChannelContentType content_type() const; + void set_content_type(::ascend::presenter::proto::ChannelContentType value); + + // @@protoc_insertion_point(class_scope:ascend.presenter.proto.OpenChannelRequest) + private: + class HasBitSetters; + + ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_; + ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr channel_name_; + int content_type_; + mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; + friend struct ::TableStruct_presenter_5fmessage_2eproto; +}; +// ------------------------------------------------------------------- + +class OpenChannelResponse : + public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:ascend.presenter.proto.OpenChannelResponse) */ { + public: + OpenChannelResponse(); + virtual ~OpenChannelResponse(); + + OpenChannelResponse(const OpenChannelResponse& from); + OpenChannelResponse(OpenChannelResponse&& from) noexcept + : OpenChannelResponse() { + *this = ::std::move(from); + } + + inline OpenChannelResponse& operator=(const OpenChannelResponse& from) { + CopyFrom(from); + return *this; + } + inline OpenChannelResponse& operator=(OpenChannelResponse&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return 
*this; + } + + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() { + return GetDescriptor(); + } + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() { + return GetMetadataStatic().descriptor; + } + static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() { + return GetMetadataStatic().reflection; + } + static const OpenChannelResponse& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const OpenChannelResponse* internal_default_instance() { + return reinterpret_cast( + &_OpenChannelResponse_default_instance_); + } + static constexpr int kIndexInFileMessages = + 1; + + void Swap(OpenChannelResponse* other); + friend void swap(OpenChannelResponse& a, OpenChannelResponse& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline OpenChannelResponse* New() const final { + return CreateMaybeMessage(nullptr); + } + + OpenChannelResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void CopyFrom(const OpenChannelResponse& from); + void MergeFrom(const OpenChannelResponse& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; + #else + bool MergePartialFromCodedStream( + ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; + ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const 
final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + inline void SharedCtor(); + inline void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(OpenChannelResponse* other); + friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; + static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { + return "ascend.presenter.proto.OpenChannelResponse"; + } + private: + inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final; + private: + static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() { + ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_presenter_5fmessage_2eproto); + return ::descriptor_table_presenter_5fmessage_2eproto.file_level_metadata[kIndexInFileMessages]; + } + + public: + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // string error_message = 2; + void clear_error_message(); + static const int kErrorMessageFieldNumber = 2; + const std::string& error_message() const; + void set_error_message(const std::string& value); + void set_error_message(std::string&& value); + void set_error_message(const char* value); + void set_error_message(const char* value, size_t size); + std::string* mutable_error_message(); + std::string* release_error_message(); + void set_allocated_error_message(std::string* error_message); + + // .ascend.presenter.proto.OpenChannelErrorCode error_code = 1; + void clear_error_code(); + static const int kErrorCodeFieldNumber = 1; + ::ascend::presenter::proto::OpenChannelErrorCode error_code() const; + void set_error_code(::ascend::presenter::proto::OpenChannelErrorCode value); + + // @@protoc_insertion_point(class_scope:ascend.presenter.proto.OpenChannelResponse) + private: + 
class HasBitSetters; + + ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_; + ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr error_message_; + int error_code_; + mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; + friend struct ::TableStruct_presenter_5fmessage_2eproto; +}; +// ------------------------------------------------------------------- + +class HeartbeatMessage : + public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:ascend.presenter.proto.HeartbeatMessage) */ { + public: + HeartbeatMessage(); + virtual ~HeartbeatMessage(); + + HeartbeatMessage(const HeartbeatMessage& from); + HeartbeatMessage(HeartbeatMessage&& from) noexcept + : HeartbeatMessage() { + *this = ::std::move(from); + } + + inline HeartbeatMessage& operator=(const HeartbeatMessage& from) { + CopyFrom(from); + return *this; + } + inline HeartbeatMessage& operator=(HeartbeatMessage&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() { + return GetDescriptor(); + } + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() { + return GetMetadataStatic().descriptor; + } + static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() { + return GetMetadataStatic().reflection; + } + static const HeartbeatMessage& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const HeartbeatMessage* internal_default_instance() { + return reinterpret_cast( + &_HeartbeatMessage_default_instance_); + } + static constexpr int kIndexInFileMessages = + 2; + + void Swap(HeartbeatMessage* other); + friend void swap(HeartbeatMessage& a, HeartbeatMessage& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline HeartbeatMessage* New() const 
final { + return CreateMaybeMessage(nullptr); + } + + HeartbeatMessage* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void CopyFrom(const HeartbeatMessage& from); + void MergeFrom(const HeartbeatMessage& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; + #else + bool MergePartialFromCodedStream( + ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; + ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + inline void SharedCtor(); + inline void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(HeartbeatMessage* other); + friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; + static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { + return "ascend.presenter.proto.HeartbeatMessage"; + } + private: + inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final; + private: + static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() { + ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_presenter_5fmessage_2eproto); + return 
::descriptor_table_presenter_5fmessage_2eproto.file_level_metadata[kIndexInFileMessages]; + } + + public: + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // @@protoc_insertion_point(class_scope:ascend.presenter.proto.HeartbeatMessage) + private: + class HasBitSetters; + + ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_; + mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; + friend struct ::TableStruct_presenter_5fmessage_2eproto; +}; +// ------------------------------------------------------------------- + +class Coordinate : + public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:ascend.presenter.proto.Coordinate) */ { + public: + Coordinate(); + virtual ~Coordinate(); + + Coordinate(const Coordinate& from); + Coordinate(Coordinate&& from) noexcept + : Coordinate() { + *this = ::std::move(from); + } + + inline Coordinate& operator=(const Coordinate& from) { + CopyFrom(from); + return *this; + } + inline Coordinate& operator=(Coordinate&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() { + return GetDescriptor(); + } + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() { + return GetMetadataStatic().descriptor; + } + static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() { + return GetMetadataStatic().reflection; + } + static const Coordinate& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const Coordinate* internal_default_instance() { + return reinterpret_cast( + &_Coordinate_default_instance_); + } + static constexpr int kIndexInFileMessages = + 3; + + void Swap(Coordinate* other); + friend void swap(Coordinate& a, 
Coordinate& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline Coordinate* New() const final { + return CreateMaybeMessage(nullptr); + } + + Coordinate* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void CopyFrom(const Coordinate& from); + void MergeFrom(const Coordinate& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; + #else + bool MergePartialFromCodedStream( + ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; + ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + inline void SharedCtor(); + inline void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(Coordinate* other); + friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; + static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { + return "ascend.presenter.proto.Coordinate"; + } + private: + inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final; + private: + static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() { + 
::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_presenter_5fmessage_2eproto); + return ::descriptor_table_presenter_5fmessage_2eproto.file_level_metadata[kIndexInFileMessages]; + } + + public: + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // uint32 x = 1; + void clear_x(); + static const int kXFieldNumber = 1; + ::PROTOBUF_NAMESPACE_ID::uint32 x() const; + void set_x(::PROTOBUF_NAMESPACE_ID::uint32 value); + + // uint32 y = 2; + void clear_y(); + static const int kYFieldNumber = 2; + ::PROTOBUF_NAMESPACE_ID::uint32 y() const; + void set_y(::PROTOBUF_NAMESPACE_ID::uint32 value); + + // @@protoc_insertion_point(class_scope:ascend.presenter.proto.Coordinate) + private: + class HasBitSetters; + + ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_; + ::PROTOBUF_NAMESPACE_ID::uint32 x_; + ::PROTOBUF_NAMESPACE_ID::uint32 y_; + mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; + friend struct ::TableStruct_presenter_5fmessage_2eproto; +}; +// ------------------------------------------------------------------- + +class Rectangle_Attr : + public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:ascend.presenter.proto.Rectangle_Attr) */ { + public: + Rectangle_Attr(); + virtual ~Rectangle_Attr(); + + Rectangle_Attr(const Rectangle_Attr& from); + Rectangle_Attr(Rectangle_Attr&& from) noexcept + : Rectangle_Attr() { + *this = ::std::move(from); + } + + inline Rectangle_Attr& operator=(const Rectangle_Attr& from) { + CopyFrom(from); + return *this; + } + inline Rectangle_Attr& operator=(Rectangle_Attr&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() { + return GetDescriptor(); + } + static 
const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() { + return GetMetadataStatic().descriptor; + } + static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() { + return GetMetadataStatic().reflection; + } + static const Rectangle_Attr& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const Rectangle_Attr* internal_default_instance() { + return reinterpret_cast( + &_Rectangle_Attr_default_instance_); + } + static constexpr int kIndexInFileMessages = + 4; + + void Swap(Rectangle_Attr* other); + friend void swap(Rectangle_Attr& a, Rectangle_Attr& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline Rectangle_Attr* New() const final { + return CreateMaybeMessage(nullptr); + } + + Rectangle_Attr* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void CopyFrom(const Rectangle_Attr& from); + void MergeFrom(const Rectangle_Attr& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; + #else + bool MergePartialFromCodedStream( + ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; + ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + inline void SharedCtor(); + inline void SharedDtor(); + void SetCachedSize(int size) 
const final; + void InternalSwap(Rectangle_Attr* other); + friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; + static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { + return "ascend.presenter.proto.Rectangle_Attr"; + } + private: + inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final; + private: + static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() { + ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_presenter_5fmessage_2eproto); + return ::descriptor_table_presenter_5fmessage_2eproto.file_level_metadata[kIndexInFileMessages]; + } + + public: + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // string label_text = 3; + void clear_label_text(); + static const int kLabelTextFieldNumber = 3; + const std::string& label_text() const; + void set_label_text(const std::string& value); + void set_label_text(std::string&& value); + void set_label_text(const char* value); + void set_label_text(const char* value, size_t size); + std::string* mutable_label_text(); + std::string* release_label_text(); + void set_allocated_label_text(std::string* label_text); + + // .ascend.presenter.proto.Coordinate left_top = 1; + bool has_left_top() const; + void clear_left_top(); + static const int kLeftTopFieldNumber = 1; + const ::ascend::presenter::proto::Coordinate& left_top() const; + ::ascend::presenter::proto::Coordinate* release_left_top(); + ::ascend::presenter::proto::Coordinate* mutable_left_top(); + void set_allocated_left_top(::ascend::presenter::proto::Coordinate* left_top); + + // .ascend.presenter.proto.Coordinate right_bottom = 2; + bool has_right_bottom() const; + void clear_right_bottom(); + static const int kRightBottomFieldNumber = 2; + const 
::ascend::presenter::proto::Coordinate& right_bottom() const; + ::ascend::presenter::proto::Coordinate* release_right_bottom(); + ::ascend::presenter::proto::Coordinate* mutable_right_bottom(); + void set_allocated_right_bottom(::ascend::presenter::proto::Coordinate* right_bottom); + + // @@protoc_insertion_point(class_scope:ascend.presenter.proto.Rectangle_Attr) + private: + class HasBitSetters; + + ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_; + ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr label_text_; + ::ascend::presenter::proto::Coordinate* left_top_; + ::ascend::presenter::proto::Coordinate* right_bottom_; + mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; + friend struct ::TableStruct_presenter_5fmessage_2eproto; +}; +// ------------------------------------------------------------------- + +class PresentImageRequest : + public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:ascend.presenter.proto.PresentImageRequest) */ { + public: + PresentImageRequest(); + virtual ~PresentImageRequest(); + + PresentImageRequest(const PresentImageRequest& from); + PresentImageRequest(PresentImageRequest&& from) noexcept + : PresentImageRequest() { + *this = ::std::move(from); + } + + inline PresentImageRequest& operator=(const PresentImageRequest& from) { + CopyFrom(from); + return *this; + } + inline PresentImageRequest& operator=(PresentImageRequest&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() { + return GetDescriptor(); + } + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() { + return GetMetadataStatic().descriptor; + } + static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() { + return GetMetadataStatic().reflection; + } + static const PresentImageRequest& 
default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const PresentImageRequest* internal_default_instance() { + return reinterpret_cast( + &_PresentImageRequest_default_instance_); + } + static constexpr int kIndexInFileMessages = + 5; + + void Swap(PresentImageRequest* other); + friend void swap(PresentImageRequest& a, PresentImageRequest& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline PresentImageRequest* New() const final { + return CreateMaybeMessage(nullptr); + } + + PresentImageRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void CopyFrom(const PresentImageRequest& from); + void MergeFrom(const PresentImageRequest& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; + #else + bool MergePartialFromCodedStream( + ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; + ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + inline void SharedCtor(); + inline void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(PresentImageRequest* other); + friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; + static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { + return 
"ascend.presenter.proto.PresentImageRequest"; + } + private: + inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final; + private: + static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() { + ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_presenter_5fmessage_2eproto); + return ::descriptor_table_presenter_5fmessage_2eproto.file_level_metadata[kIndexInFileMessages]; + } + + public: + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // repeated .ascend.presenter.proto.Rectangle_Attr rectangle_list = 5; + int rectangle_list_size() const; + void clear_rectangle_list(); + static const int kRectangleListFieldNumber = 5; + ::ascend::presenter::proto::Rectangle_Attr* mutable_rectangle_list(int index); + ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::ascend::presenter::proto::Rectangle_Attr >* + mutable_rectangle_list(); + const ::ascend::presenter::proto::Rectangle_Attr& rectangle_list(int index) const; + ::ascend::presenter::proto::Rectangle_Attr* add_rectangle_list(); + const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::ascend::presenter::proto::Rectangle_Attr >& + rectangle_list() const; + + // bytes data = 4; + void clear_data(); + static const int kDataFieldNumber = 4; + const std::string& data() const; + void set_data(const std::string& value); + void set_data(std::string&& value); + void set_data(const char* value); + void set_data(const void* value, size_t size); + std::string* mutable_data(); + std::string* release_data(); + void set_allocated_data(std::string* data); + + // .ascend.presenter.proto.ImageFormat format = 1; + void clear_format(); + static const int kFormatFieldNumber = 1; + ::ascend::presenter::proto::ImageFormat format() const; + void 
set_format(::ascend::presenter::proto::ImageFormat value); + + // uint32 width = 2; + void clear_width(); + static const int kWidthFieldNumber = 2; + ::PROTOBUF_NAMESPACE_ID::uint32 width() const; + void set_width(::PROTOBUF_NAMESPACE_ID::uint32 value); + + // uint32 height = 3; + void clear_height(); + static const int kHeightFieldNumber = 3; + ::PROTOBUF_NAMESPACE_ID::uint32 height() const; + void set_height(::PROTOBUF_NAMESPACE_ID::uint32 value); + + // @@protoc_insertion_point(class_scope:ascend.presenter.proto.PresentImageRequest) + private: + class HasBitSetters; + + ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_; + ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::ascend::presenter::proto::Rectangle_Attr > rectangle_list_; + ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr data_; + int format_; + ::PROTOBUF_NAMESPACE_ID::uint32 width_; + ::PROTOBUF_NAMESPACE_ID::uint32 height_; + mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; + friend struct ::TableStruct_presenter_5fmessage_2eproto; +}; +// ------------------------------------------------------------------- + +class PresentImageResponse : + public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:ascend.presenter.proto.PresentImageResponse) */ { + public: + PresentImageResponse(); + virtual ~PresentImageResponse(); + + PresentImageResponse(const PresentImageResponse& from); + PresentImageResponse(PresentImageResponse&& from) noexcept + : PresentImageResponse() { + *this = ::std::move(from); + } + + inline PresentImageResponse& operator=(const PresentImageResponse& from) { + CopyFrom(from); + return *this; + } + inline PresentImageResponse& operator=(PresentImageResponse&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() { + return 
GetDescriptor(); + } + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() { + return GetMetadataStatic().descriptor; + } + static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() { + return GetMetadataStatic().reflection; + } + static const PresentImageResponse& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const PresentImageResponse* internal_default_instance() { + return reinterpret_cast( + &_PresentImageResponse_default_instance_); + } + static constexpr int kIndexInFileMessages = + 6; + + void Swap(PresentImageResponse* other); + friend void swap(PresentImageResponse& a, PresentImageResponse& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline PresentImageResponse* New() const final { + return CreateMaybeMessage(nullptr); + } + + PresentImageResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void CopyFrom(const PresentImageResponse& from); + void MergeFrom(const PresentImageResponse& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; + #else + bool MergePartialFromCodedStream( + ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; + ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + 
private: + inline void SharedCtor(); + inline void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(PresentImageResponse* other); + friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; + static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { + return "ascend.presenter.proto.PresentImageResponse"; + } + private: + inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final; + private: + static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() { + ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_presenter_5fmessage_2eproto); + return ::descriptor_table_presenter_5fmessage_2eproto.file_level_metadata[kIndexInFileMessages]; + } + + public: + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // string error_message = 2; + void clear_error_message(); + static const int kErrorMessageFieldNumber = 2; + const std::string& error_message() const; + void set_error_message(const std::string& value); + void set_error_message(std::string&& value); + void set_error_message(const char* value); + void set_error_message(const char* value, size_t size); + std::string* mutable_error_message(); + std::string* release_error_message(); + void set_allocated_error_message(std::string* error_message); + + // .ascend.presenter.proto.PresentDataErrorCode error_code = 1; + void clear_error_code(); + static const int kErrorCodeFieldNumber = 1; + ::ascend::presenter::proto::PresentDataErrorCode error_code() const; + void set_error_code(::ascend::presenter::proto::PresentDataErrorCode value); + + // @@protoc_insertion_point(class_scope:ascend.presenter.proto.PresentImageResponse) + private: + class HasBitSetters; + + 
::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_; + ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr error_message_; + int error_code_; + mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; + friend struct ::TableStruct_presenter_5fmessage_2eproto; +}; +// =================================================================== + + +// =================================================================== + +#ifdef __GNUC__ + #pragma GCC diagnostic push + #pragma GCC diagnostic ignored "-Wstrict-aliasing" +#endif // __GNUC__ +// OpenChannelRequest + +// string channel_name = 1; +inline void OpenChannelRequest::clear_channel_name() { + channel_name_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline const std::string& OpenChannelRequest::channel_name() const { + // @@protoc_insertion_point(field_get:ascend.presenter.proto.OpenChannelRequest.channel_name) + return channel_name_.GetNoArena(); +} +inline void OpenChannelRequest::set_channel_name(const std::string& value) { + + channel_name_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:ascend.presenter.proto.OpenChannelRequest.channel_name) +} +inline void OpenChannelRequest::set_channel_name(std::string&& value) { + + channel_name_.SetNoArena( + &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:ascend.presenter.proto.OpenChannelRequest.channel_name) +} +inline void OpenChannelRequest::set_channel_name(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + channel_name_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:ascend.presenter.proto.OpenChannelRequest.channel_name) +} +inline void OpenChannelRequest::set_channel_name(const char* value, size_t size) { + + 
channel_name_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:ascend.presenter.proto.OpenChannelRequest.channel_name) +} +inline std::string* OpenChannelRequest::mutable_channel_name() { + + // @@protoc_insertion_point(field_mutable:ascend.presenter.proto.OpenChannelRequest.channel_name) + return channel_name_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline std::string* OpenChannelRequest::release_channel_name() { + // @@protoc_insertion_point(field_release:ascend.presenter.proto.OpenChannelRequest.channel_name) + + return channel_name_.ReleaseNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline void OpenChannelRequest::set_allocated_channel_name(std::string* channel_name) { + if (channel_name != nullptr) { + + } else { + + } + channel_name_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), channel_name); + // @@protoc_insertion_point(field_set_allocated:ascend.presenter.proto.OpenChannelRequest.channel_name) +} + +// .ascend.presenter.proto.ChannelContentType content_type = 2; +inline void OpenChannelRequest::clear_content_type() { + content_type_ = 0; +} +inline ::ascend::presenter::proto::ChannelContentType OpenChannelRequest::content_type() const { + // @@protoc_insertion_point(field_get:ascend.presenter.proto.OpenChannelRequest.content_type) + return static_cast< ::ascend::presenter::proto::ChannelContentType >(content_type_); +} +inline void OpenChannelRequest::set_content_type(::ascend::presenter::proto::ChannelContentType value) { + + content_type_ = value; + // @@protoc_insertion_point(field_set:ascend.presenter.proto.OpenChannelRequest.content_type) +} + +// ------------------------------------------------------------------- + +// OpenChannelResponse + +// .ascend.presenter.proto.OpenChannelErrorCode error_code = 1; 
+inline void OpenChannelResponse::clear_error_code() { + error_code_ = 0; +} +inline ::ascend::presenter::proto::OpenChannelErrorCode OpenChannelResponse::error_code() const { + // @@protoc_insertion_point(field_get:ascend.presenter.proto.OpenChannelResponse.error_code) + return static_cast< ::ascend::presenter::proto::OpenChannelErrorCode >(error_code_); +} +inline void OpenChannelResponse::set_error_code(::ascend::presenter::proto::OpenChannelErrorCode value) { + + error_code_ = value; + // @@protoc_insertion_point(field_set:ascend.presenter.proto.OpenChannelResponse.error_code) +} + +// string error_message = 2; +inline void OpenChannelResponse::clear_error_message() { + error_message_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline const std::string& OpenChannelResponse::error_message() const { + // @@protoc_insertion_point(field_get:ascend.presenter.proto.OpenChannelResponse.error_message) + return error_message_.GetNoArena(); +} +inline void OpenChannelResponse::set_error_message(const std::string& value) { + + error_message_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:ascend.presenter.proto.OpenChannelResponse.error_message) +} +inline void OpenChannelResponse::set_error_message(std::string&& value) { + + error_message_.SetNoArena( + &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:ascend.presenter.proto.OpenChannelResponse.error_message) +} +inline void OpenChannelResponse::set_error_message(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + error_message_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:ascend.presenter.proto.OpenChannelResponse.error_message) +} +inline void OpenChannelResponse::set_error_message(const char* value, 
size_t size) { + + error_message_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:ascend.presenter.proto.OpenChannelResponse.error_message) +} +inline std::string* OpenChannelResponse::mutable_error_message() { + + // @@protoc_insertion_point(field_mutable:ascend.presenter.proto.OpenChannelResponse.error_message) + return error_message_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline std::string* OpenChannelResponse::release_error_message() { + // @@protoc_insertion_point(field_release:ascend.presenter.proto.OpenChannelResponse.error_message) + + return error_message_.ReleaseNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline void OpenChannelResponse::set_allocated_error_message(std::string* error_message) { + if (error_message != nullptr) { + + } else { + + } + error_message_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), error_message); + // @@protoc_insertion_point(field_set_allocated:ascend.presenter.proto.OpenChannelResponse.error_message) +} + +// ------------------------------------------------------------------- + +// HeartbeatMessage + +// ------------------------------------------------------------------- + +// Coordinate + +// uint32 x = 1; +inline void Coordinate::clear_x() { + x_ = 0u; +} +inline ::PROTOBUF_NAMESPACE_ID::uint32 Coordinate::x() const { + // @@protoc_insertion_point(field_get:ascend.presenter.proto.Coordinate.x) + return x_; +} +inline void Coordinate::set_x(::PROTOBUF_NAMESPACE_ID::uint32 value) { + + x_ = value; + // @@protoc_insertion_point(field_set:ascend.presenter.proto.Coordinate.x) +} + +// uint32 y = 2; +inline void Coordinate::clear_y() { + y_ = 0u; +} +inline ::PROTOBUF_NAMESPACE_ID::uint32 Coordinate::y() const { + // 
@@protoc_insertion_point(field_get:ascend.presenter.proto.Coordinate.y) + return y_; +} +inline void Coordinate::set_y(::PROTOBUF_NAMESPACE_ID::uint32 value) { + + y_ = value; + // @@protoc_insertion_point(field_set:ascend.presenter.proto.Coordinate.y) +} + +// ------------------------------------------------------------------- + +// Rectangle_Attr + +// .ascend.presenter.proto.Coordinate left_top = 1; +inline bool Rectangle_Attr::has_left_top() const { + return this != internal_default_instance() && left_top_ != nullptr; +} +inline void Rectangle_Attr::clear_left_top() { + if (GetArenaNoVirtual() == nullptr && left_top_ != nullptr) { + delete left_top_; + } + left_top_ = nullptr; +} +inline const ::ascend::presenter::proto::Coordinate& Rectangle_Attr::left_top() const { + const ::ascend::presenter::proto::Coordinate* p = left_top_; + // @@protoc_insertion_point(field_get:ascend.presenter.proto.Rectangle_Attr.left_top) + return p != nullptr ? *p : *reinterpret_cast( + &::ascend::presenter::proto::_Coordinate_default_instance_); +} +inline ::ascend::presenter::proto::Coordinate* Rectangle_Attr::release_left_top() { + // @@protoc_insertion_point(field_release:ascend.presenter.proto.Rectangle_Attr.left_top) + + ::ascend::presenter::proto::Coordinate* temp = left_top_; + left_top_ = nullptr; + return temp; +} +inline ::ascend::presenter::proto::Coordinate* Rectangle_Attr::mutable_left_top() { + + if (left_top_ == nullptr) { + auto* p = CreateMaybeMessage<::ascend::presenter::proto::Coordinate>(GetArenaNoVirtual()); + left_top_ = p; + } + // @@protoc_insertion_point(field_mutable:ascend.presenter.proto.Rectangle_Attr.left_top) + return left_top_; +} +inline void Rectangle_Attr::set_allocated_left_top(::ascend::presenter::proto::Coordinate* left_top) { + ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete left_top_; + } + if (left_top) { + ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; + if 
(message_arena != submessage_arena) { + left_top = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( + message_arena, left_top, submessage_arena); + } + + } else { + + } + left_top_ = left_top; + // @@protoc_insertion_point(field_set_allocated:ascend.presenter.proto.Rectangle_Attr.left_top) +} + +// .ascend.presenter.proto.Coordinate right_bottom = 2; +inline bool Rectangle_Attr::has_right_bottom() const { + return this != internal_default_instance() && right_bottom_ != nullptr; +} +inline void Rectangle_Attr::clear_right_bottom() { + if (GetArenaNoVirtual() == nullptr && right_bottom_ != nullptr) { + delete right_bottom_; + } + right_bottom_ = nullptr; +} +inline const ::ascend::presenter::proto::Coordinate& Rectangle_Attr::right_bottom() const { + const ::ascend::presenter::proto::Coordinate* p = right_bottom_; + // @@protoc_insertion_point(field_get:ascend.presenter.proto.Rectangle_Attr.right_bottom) + return p != nullptr ? *p : *reinterpret_cast( + &::ascend::presenter::proto::_Coordinate_default_instance_); +} +inline ::ascend::presenter::proto::Coordinate* Rectangle_Attr::release_right_bottom() { + // @@protoc_insertion_point(field_release:ascend.presenter.proto.Rectangle_Attr.right_bottom) + + ::ascend::presenter::proto::Coordinate* temp = right_bottom_; + right_bottom_ = nullptr; + return temp; +} +inline ::ascend::presenter::proto::Coordinate* Rectangle_Attr::mutable_right_bottom() { + + if (right_bottom_ == nullptr) { + auto* p = CreateMaybeMessage<::ascend::presenter::proto::Coordinate>(GetArenaNoVirtual()); + right_bottom_ = p; + } + // @@protoc_insertion_point(field_mutable:ascend.presenter.proto.Rectangle_Attr.right_bottom) + return right_bottom_; +} +inline void Rectangle_Attr::set_allocated_right_bottom(::ascend::presenter::proto::Coordinate* right_bottom) { + ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete right_bottom_; + } + if (right_bottom) { + ::PROTOBUF_NAMESPACE_ID::Arena* 
submessage_arena = nullptr; + if (message_arena != submessage_arena) { + right_bottom = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( + message_arena, right_bottom, submessage_arena); + } + + } else { + + } + right_bottom_ = right_bottom; + // @@protoc_insertion_point(field_set_allocated:ascend.presenter.proto.Rectangle_Attr.right_bottom) +} + +// string label_text = 3; +inline void Rectangle_Attr::clear_label_text() { + label_text_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline const std::string& Rectangle_Attr::label_text() const { + // @@protoc_insertion_point(field_get:ascend.presenter.proto.Rectangle_Attr.label_text) + return label_text_.GetNoArena(); +} +inline void Rectangle_Attr::set_label_text(const std::string& value) { + + label_text_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:ascend.presenter.proto.Rectangle_Attr.label_text) +} +inline void Rectangle_Attr::set_label_text(std::string&& value) { + + label_text_.SetNoArena( + &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:ascend.presenter.proto.Rectangle_Attr.label_text) +} +inline void Rectangle_Attr::set_label_text(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + label_text_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:ascend.presenter.proto.Rectangle_Attr.label_text) +} +inline void Rectangle_Attr::set_label_text(const char* value, size_t size) { + + label_text_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:ascend.presenter.proto.Rectangle_Attr.label_text) +} +inline std::string* Rectangle_Attr::mutable_label_text() { + + // 
@@protoc_insertion_point(field_mutable:ascend.presenter.proto.Rectangle_Attr.label_text) + return label_text_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline std::string* Rectangle_Attr::release_label_text() { + // @@protoc_insertion_point(field_release:ascend.presenter.proto.Rectangle_Attr.label_text) + + return label_text_.ReleaseNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline void Rectangle_Attr::set_allocated_label_text(std::string* label_text) { + if (label_text != nullptr) { + + } else { + + } + label_text_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), label_text); + // @@protoc_insertion_point(field_set_allocated:ascend.presenter.proto.Rectangle_Attr.label_text) +} + +// ------------------------------------------------------------------- + +// PresentImageRequest + +// .ascend.presenter.proto.ImageFormat format = 1; +inline void PresentImageRequest::clear_format() { + format_ = 0; +} +inline ::ascend::presenter::proto::ImageFormat PresentImageRequest::format() const { + // @@protoc_insertion_point(field_get:ascend.presenter.proto.PresentImageRequest.format) + return static_cast< ::ascend::presenter::proto::ImageFormat >(format_); +} +inline void PresentImageRequest::set_format(::ascend::presenter::proto::ImageFormat value) { + + format_ = value; + // @@protoc_insertion_point(field_set:ascend.presenter.proto.PresentImageRequest.format) +} + +// uint32 width = 2; +inline void PresentImageRequest::clear_width() { + width_ = 0u; +} +inline ::PROTOBUF_NAMESPACE_ID::uint32 PresentImageRequest::width() const { + // @@protoc_insertion_point(field_get:ascend.presenter.proto.PresentImageRequest.width) + return width_; +} +inline void PresentImageRequest::set_width(::PROTOBUF_NAMESPACE_ID::uint32 value) { + + width_ = value; + // @@protoc_insertion_point(field_set:ascend.presenter.proto.PresentImageRequest.width) +} + +// uint32 height = 3; 
+inline void PresentImageRequest::clear_height() { + height_ = 0u; +} +inline ::PROTOBUF_NAMESPACE_ID::uint32 PresentImageRequest::height() const { + // @@protoc_insertion_point(field_get:ascend.presenter.proto.PresentImageRequest.height) + return height_; +} +inline void PresentImageRequest::set_height(::PROTOBUF_NAMESPACE_ID::uint32 value) { + + height_ = value; + // @@protoc_insertion_point(field_set:ascend.presenter.proto.PresentImageRequest.height) +} + +// bytes data = 4; +inline void PresentImageRequest::clear_data() { + data_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline const std::string& PresentImageRequest::data() const { + // @@protoc_insertion_point(field_get:ascend.presenter.proto.PresentImageRequest.data) + return data_.GetNoArena(); +} +inline void PresentImageRequest::set_data(const std::string& value) { + + data_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:ascend.presenter.proto.PresentImageRequest.data) +} +inline void PresentImageRequest::set_data(std::string&& value) { + + data_.SetNoArena( + &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:ascend.presenter.proto.PresentImageRequest.data) +} +inline void PresentImageRequest::set_data(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + data_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:ascend.presenter.proto.PresentImageRequest.data) +} +inline void PresentImageRequest::set_data(const void* value, size_t size) { + + data_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:ascend.presenter.proto.PresentImageRequest.data) +} +inline std::string* 
PresentImageRequest::mutable_data() { + + // @@protoc_insertion_point(field_mutable:ascend.presenter.proto.PresentImageRequest.data) + return data_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline std::string* PresentImageRequest::release_data() { + // @@protoc_insertion_point(field_release:ascend.presenter.proto.PresentImageRequest.data) + + return data_.ReleaseNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline void PresentImageRequest::set_allocated_data(std::string* data) { + if (data != nullptr) { + + } else { + + } + data_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), data); + // @@protoc_insertion_point(field_set_allocated:ascend.presenter.proto.PresentImageRequest.data) +} + +// repeated .ascend.presenter.proto.Rectangle_Attr rectangle_list = 5; +inline int PresentImageRequest::rectangle_list_size() const { + return rectangle_list_.size(); +} +inline void PresentImageRequest::clear_rectangle_list() { + rectangle_list_.Clear(); +} +inline ::ascend::presenter::proto::Rectangle_Attr* PresentImageRequest::mutable_rectangle_list(int index) { + // @@protoc_insertion_point(field_mutable:ascend.presenter.proto.PresentImageRequest.rectangle_list) + return rectangle_list_.Mutable(index); +} +inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::ascend::presenter::proto::Rectangle_Attr >* +PresentImageRequest::mutable_rectangle_list() { + // @@protoc_insertion_point(field_mutable_list:ascend.presenter.proto.PresentImageRequest.rectangle_list) + return &rectangle_list_; +} +inline const ::ascend::presenter::proto::Rectangle_Attr& PresentImageRequest::rectangle_list(int index) const { + // @@protoc_insertion_point(field_get:ascend.presenter.proto.PresentImageRequest.rectangle_list) + return rectangle_list_.Get(index); +} +inline ::ascend::presenter::proto::Rectangle_Attr* PresentImageRequest::add_rectangle_list() { + // 
@@protoc_insertion_point(field_add:ascend.presenter.proto.PresentImageRequest.rectangle_list) + return rectangle_list_.Add(); +} +inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::ascend::presenter::proto::Rectangle_Attr >& +PresentImageRequest::rectangle_list() const { + // @@protoc_insertion_point(field_list:ascend.presenter.proto.PresentImageRequest.rectangle_list) + return rectangle_list_; +} + +// ------------------------------------------------------------------- + +// PresentImageResponse + +// .ascend.presenter.proto.PresentDataErrorCode error_code = 1; +inline void PresentImageResponse::clear_error_code() { + error_code_ = 0; +} +inline ::ascend::presenter::proto::PresentDataErrorCode PresentImageResponse::error_code() const { + // @@protoc_insertion_point(field_get:ascend.presenter.proto.PresentImageResponse.error_code) + return static_cast< ::ascend::presenter::proto::PresentDataErrorCode >(error_code_); +} +inline void PresentImageResponse::set_error_code(::ascend::presenter::proto::PresentDataErrorCode value) { + + error_code_ = value; + // @@protoc_insertion_point(field_set:ascend.presenter.proto.PresentImageResponse.error_code) +} + +// string error_message = 2; +inline void PresentImageResponse::clear_error_message() { + error_message_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline const std::string& PresentImageResponse::error_message() const { + // @@protoc_insertion_point(field_get:ascend.presenter.proto.PresentImageResponse.error_message) + return error_message_.GetNoArena(); +} +inline void PresentImageResponse::set_error_message(const std::string& value) { + + error_message_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:ascend.presenter.proto.PresentImageResponse.error_message) +} +inline void PresentImageResponse::set_error_message(std::string&& value) { + + error_message_.SetNoArena( + 
&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:ascend.presenter.proto.PresentImageResponse.error_message) +} +inline void PresentImageResponse::set_error_message(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + error_message_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:ascend.presenter.proto.PresentImageResponse.error_message) +} +inline void PresentImageResponse::set_error_message(const char* value, size_t size) { + + error_message_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:ascend.presenter.proto.PresentImageResponse.error_message) +} +inline std::string* PresentImageResponse::mutable_error_message() { + + // @@protoc_insertion_point(field_mutable:ascend.presenter.proto.PresentImageResponse.error_message) + return error_message_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline std::string* PresentImageResponse::release_error_message() { + // @@protoc_insertion_point(field_release:ascend.presenter.proto.PresentImageResponse.error_message) + + return error_message_.ReleaseNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline void PresentImageResponse::set_allocated_error_message(std::string* error_message) { + if (error_message != nullptr) { + + } else { + + } + error_message_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), error_message); + // @@protoc_insertion_point(field_set_allocated:ascend.presenter.proto.PresentImageResponse.error_message) +} + +#ifdef __GNUC__ + #pragma GCC diagnostic pop +#endif // __GNUC__ +// ------------------------------------------------------------------- + +// 
------------------------------------------------------------------- + +// ------------------------------------------------------------------- + +// ------------------------------------------------------------------- + +// ------------------------------------------------------------------- + +// ------------------------------------------------------------------- + + +// @@protoc_insertion_point(namespace_scope) + +} // namespace proto +} // namespace presenter +} // namespace ascend + +PROTOBUF_NAMESPACE_OPEN + +template <> struct is_proto_enum< ::ascend::presenter::proto::OpenChannelErrorCode> : ::std::true_type {}; +template <> +inline const EnumDescriptor* GetEnumDescriptor< ::ascend::presenter::proto::OpenChannelErrorCode>() { + return ::ascend::presenter::proto::OpenChannelErrorCode_descriptor(); +} +template <> struct is_proto_enum< ::ascend::presenter::proto::ChannelContentType> : ::std::true_type {}; +template <> +inline const EnumDescriptor* GetEnumDescriptor< ::ascend::presenter::proto::ChannelContentType>() { + return ::ascend::presenter::proto::ChannelContentType_descriptor(); +} +template <> struct is_proto_enum< ::ascend::presenter::proto::ImageFormat> : ::std::true_type {}; +template <> +inline const EnumDescriptor* GetEnumDescriptor< ::ascend::presenter::proto::ImageFormat>() { + return ::ascend::presenter::proto::ImageFormat_descriptor(); +} +template <> struct is_proto_enum< ::ascend::presenter::proto::PresentDataErrorCode> : ::std::true_type {}; +template <> +inline const EnumDescriptor* GetEnumDescriptor< ::ascend::presenter::proto::PresentDataErrorCode>() { + return ::ascend::presenter::proto::PresentDataErrorCode_descriptor(); +} + +PROTOBUF_NAMESPACE_CLOSE + +// @@protoc_insertion_point(global_scope) + +#include +#endif // GOOGLE_PROTOBUF_INCLUDED_GOOGLE_PROTOBUF_INCLUDED_presenter_5fmessage_2eproto diff --git a/cplusplus/common/presenteragent/proto/presenter_message.proto b/cplusplus/common/presenteragent/proto/presenter_message.proto new 
file mode 100644 index 0000000000000000000000000000000000000000..879d5572b3cd434dc41dd18ecbd3e2f16ed3f42c --- /dev/null +++ b/cplusplus/common/presenteragent/proto/presenter_message.proto @@ -0,0 +1,67 @@ +syntax = "proto3"; + +package ascend.presenter.proto; + +enum OpenChannelErrorCode { + kOpenChannelErrorNone = 0; + kOpenChannelErrorNoSuchChannel = 1; + kOpenChannelErrorChannelAlreadyOpened = 2; + kOpenChannelErrorOther = -1; +} + +enum ChannelContentType { + kChannelContentTypeImage = 0; + kChannelContentTypeVideo = 1; +} + +// By Protocol Buffer Style Guide, need to use underscore_separated_names +// for field names +message OpenChannelRequest { + string channel_name = 1; + ChannelContentType content_type = 2; +} + +message OpenChannelResponse { + OpenChannelErrorCode error_code = 1; + string error_message = 2; +} + +message HeartbeatMessage { + +} + +enum ImageFormat { + kImageFormatJpeg = 0; +} + +message Coordinate { + uint32 x = 1; + uint32 y = 2; +} + +message Rectangle_Attr { + Coordinate left_top = 1; + Coordinate right_bottom = 2; + string label_text = 3; +} + +message PresentImageRequest { + ImageFormat format = 1; + uint32 width = 2; + uint32 height = 3; + bytes data = 4; + repeated Rectangle_Attr rectangle_list = 5; +} + +enum PresentDataErrorCode { + kPresentDataErrorNone = 0; + kPresentDataErrorUnsupportedType = 1; + kPresentDataErrorUnsupportedFormat = 2; + kPresentDataErrorOther = -1; +} + +message PresentImageResponse { + PresentDataErrorCode error_code = 1; + string error_message = 2; +} + diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/channel/channel.cpp b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/channel/channel.cpp new file mode 100644 index 0000000000000000000000000000000000000000..5b95b5f65bba590125194771f792d73df98c7f2b --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/channel/channel.cpp @@ -0,0 +1,50 @@ +/** + * 
============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#include "ascenddk/presenter/agent/channel/default_channel.h" + +namespace ascend { +namespace presenter { + +Channel* ChannelFactory::NewChannel(const std::string& host_ip, uint16_t port) { + return DefaultChannel::NewChannel(host_ip, port, nullptr); +} + +Channel* ChannelFactory::NewChannel( + const std::string& host_ip, uint16_t port, + std::shared_ptr handler) { + return DefaultChannel::NewChannel(host_ip, port, handler); +} + +} /* namespace presenter */ +} /* namespace ascend */ diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/channel/default_channel.cpp b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/channel/default_channel.cpp new file mode 100644 index 0000000000000000000000000000000000000000..3f56f474f9a9a27365a1139c4dc5e089697959f3 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/channel/default_channel.cpp @@ -0,0 +1,291 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * ============================================================================ + */ + +#include +#include +#include +#include +#include +#include + +#include "proto/presenter_message.pb.h" + +#include "ascenddk/presenter/agent/channel/default_channel.h" +#include "ascenddk/presenter/agent/net/raw_socket_factory.h" +#include "ascenddk/presenter/agent/util/logging.h" + +using namespace std; +using namespace google::protobuf; + +namespace { +const int HEARTBEAT_INTERVAL = 1500; // 1.5s +} + +namespace ascend { +namespace presenter { + +DefaultChannel* DefaultChannel::NewChannel( + const std::string& host_ip, uint16_t port, + std::shared_ptr handler) { + DefaultChannel *channel = nullptr; + std::shared_ptr fac( + new (std::nothrow) RawSocketFactory(host_ip, port)); + if (fac != nullptr) { + channel = new (std::nothrow) DefaultChannel(fac); + if (channel != nullptr && handler != nullptr) { + channel->SetInitChannelHandler(handler); + } + } + + return channel; +} + +DefaultChannel::DefaultChannel(std::shared_ptr socket_factory) + : socket_factory_(socket_factory), + open_(false), + disposed_(false) { +} + +DefaultChannel::~DefaultChannel() { + disposed_ = true; + if 
(heartbeat_thread_ != nullptr) { + heartbeat_thread_->join(); + } +} + +void DefaultChannel::SetInitChannelHandler( + std::shared_ptr handler) { + init_channel_handler_ = handler; +} + +shared_ptr DefaultChannel::GetInitChannelHandler() { + return init_channel_handler_; +} + +PresenterErrorCode DefaultChannel::HandleInitialization( + const Message& message) { + // send init request + PresenterErrorCode error_code = conn_->SendMessage(message); + if (error_code != PresenterErrorCode::kNone) { + AGENT_LOG_ERROR("Failed to send init request, %d", error_code); + return error_code; + } + + // receive init response + unique_ptr resp; + error_code = conn_->ReceiveMessage(resp); + if (error_code != PresenterErrorCode::kNone) { + AGENT_LOG_ERROR("Failed to send init response, %d", error_code); + return error_code; + } + + // check response + if (!init_channel_handler_->CheckInitResponse(*resp)) { + AGENT_LOG_ERROR("App check response failed"); + return PresenterErrorCode::kAppDefinedError; + } + + return PresenterErrorCode::kNone; +} + +PresenterErrorCode DefaultChannel::Open() { + //check request generation before connection + unique_ptr message; + if (init_channel_handler_ != nullptr) { + message.reset(init_channel_handler_->CreateInitRequest()); + if (message == nullptr) { + AGENT_LOG_ERROR("App create init request failed"); + return PresenterErrorCode::kAppDefinedError; + } + } + + Socket* sock = socket_factory_->Create(); + PresenterErrorCode error_code = socket_factory_->GetErrorCode(); + if (error_code != PresenterErrorCode::kNone) { + AGENT_LOG_ERROR("Failed to create socket, %d", error_code); + return error_code; + } + + Connection* conn = Connection::New(sock); + if (conn == nullptr) { + delete sock; + return PresenterErrorCode::kBadAlloc; + } + this->conn_.reset(conn); + + //perform init process + if (message != nullptr) { + error_code = HandleInitialization(*message); + if (error_code != PresenterErrorCode::kNone) { + conn_.reset(nullptr); + return error_code; + 
} + } + + open_ = true; + // prevent from starting multiple thread + if (heartbeat_thread_ == nullptr) { + StartHeartbeatThread(); + } + + return PresenterErrorCode::kNone; +} + +void DefaultChannel::StartHeartbeatThread() { + this->heartbeat_thread_.reset( + new (nothrow) thread(bind(&DefaultChannel::KeepAlive, this))); + + if (heartbeat_thread_ != nullptr) { + AGENT_LOG_INFO("heartbeat thread started"); + } +} + +void DefaultChannel::KeepAlive() { + chrono::milliseconds heartbeatInterval(HEARTBEAT_INTERVAL); + while (!disposed_) { + SendHeartbeat(); + + // interruptable wait + unique_lock lock(mtx_); + cv_shutdown_.wait_for(lock, heartbeatInterval, + [this]() {return disposed_.load();}); + } + + AGENT_LOG_DEBUG("heartbeat thread ended"); +} + +void DefaultChannel::SendHeartbeat() { + // reopen channel if disconnected + if (!open_) { + if (Open() != PresenterErrorCode::kNone) { + return; + } + } + + // construct a heartbeat message then send it + proto::HeartbeatMessage heartbeat_msg; + SendMessage(heartbeat_msg); +} + +PresenterErrorCode DefaultChannel::SendMessage(const Message& message) { + PartialMessageWithTlvs msg; + string msg_name = message.GetDescriptor()->full_name(); + AGENT_LOG_DEBUG("To send message: %s", msg_name.c_str()); + msg.message = &message; + return SendMessage(msg); +} + +PresenterErrorCode DefaultChannel::SendMessage( + const PartialMessageWithTlvs& message) { + if (!open_) { + AGENT_LOG_ERROR("Channel is not open, send message failed"); + return PresenterErrorCode::kConnection; + } + + PresenterErrorCode errorCode = PresenterErrorCode::kOther; + try { + errorCode = conn_->SendMessage(message); + //connect error, set is_open to false, enable retry + if (errorCode == PresenterErrorCode::kConnection) { + open_ = false; + } + } catch (std::exception &e) { // protobuf may throw FatalException + AGENT_LOG_ERROR("Protobuf error: %s", e.what()); + open_ = false; + } + + return errorCode; +} + +PresenterErrorCode DefaultChannel::ReceiveMessage( + 
unique_ptr& message) { + AGENT_LOG_DEBUG("To receive message"); + if (!open_) { + AGENT_LOG_ERROR("Channel is not open, receive message failed"); + return PresenterErrorCode::kConnection; + } + + PresenterErrorCode error_code = PresenterErrorCode::kOther; + try { + error_code = conn_->ReceiveMessage(message); + // connect error and codec error, set is_open to false, enable retry + if (error_code == PresenterErrorCode::kConnection + || error_code == PresenterErrorCode::kCodec) { + open_ = false; + } + + } catch (std::exception &e) { // protobuf may throw FatalException + AGENT_LOG_ERROR("Protobuf error: %s", e.what()); + open_ = false; + } + + return error_code; +} + +PresenterErrorCode DefaultChannel::SendMessage( + const google::protobuf::Message& message, + std::unique_ptr &response) { + string msg_name = message.GetDescriptor()->full_name(); + AGENT_LOG_DEBUG("To send message: %s", msg_name.c_str()); + PresenterErrorCode error_code = SendMessage(message); + if (error_code == PresenterErrorCode::kNone) { + error_code = ReceiveMessage(response); + } + + return error_code; +} + +PresenterErrorCode DefaultChannel::SendMessage( + const PartialMessageWithTlvs& message, + std::unique_ptr &response) { + string msg_name = message.message->GetDescriptor()->full_name(); + AGENT_LOG_DEBUG("To send message: %s", msg_name.c_str()); + PresenterErrorCode error_code = SendMessage(message); + if (error_code == PresenterErrorCode::kNone) { + error_code = ReceiveMessage(response); + } + + return error_code; +} + +const std::string& DefaultChannel::GetDescription() const { + return this->description_; +} + +void DefaultChannel::SetDescription(const std::string& desc) { + this->description_ = desc; +} + +} +/* namespace presenter */ +} /* namespace ascend */ diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/channel/default_channel.h b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/channel/default_channel.h new file mode 100644 index 
0000000000000000000000000000000000000000..88348b00e9e84d388a35677d0bded432e965829c --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/channel/default_channel.h @@ -0,0 +1,192 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#ifndef ASCENDDK_PRESENTER_AGENT_CHANNEL_DEFAULT_CHANNEL_H_ +#define ASCENDDK_PRESENTER_AGENT_CHANNEL_DEFAULT_CHANNEL_H_ + +#include +#include +#include +#include +#include +#include +#include + +#include "ascenddk/presenter/agent/connection/connection.h" +#include "ascenddk/presenter/agent/channel.h" + +namespace ascend { +namespace presenter { + +/** + * Default channel implementation + */ +class DefaultChannel : public Channel { + public: + + /** + * @brief create a channel + * @param [in] host_ip host IP of server + * @param [in] port port of server + * @param [in] handler init handler + * @return pointer to channel + */ + static DefaultChannel* NewChannel( + const std::string& host_ip, uint16_t port, + std::shared_ptr handler); + + virtual ~DefaultChannel(); + + /** + * @brief Open channel + * @return PresenterErrorCode + */ + virtual PresenterErrorCode Open() override; + + /** + * @brief send message to server + * @param [in] message message + * @return PresenterErrorCode + */ + virtual PresenterErrorCode SendMessage( + const google::protobuf::Message& message) override; + + /** + * @brief send message to server + * @param [in] message message + * @return PresenterErrorCode + */ + virtual PresenterErrorCode SendMessage(const PartialMessageWithTlvs& message) + override; + + /** + * @brief send message to server and read the response + * @param [in] message message + * @pararm [out] response response + * @return PresenterErrorCode + */ + virtual PresenterErrorCode SendMessage( + const google::protobuf::Message& message, + std::unique_ptr& response) override; + + /** + * @brief send message to server and read the response + * @param [in] message message + * @pararm [out] response response + * @return PresenterErrorCode + */ + virtual PresenterErrorCode SendMessage( + const PartialMessageWithTlvs& message, + std::unique_ptr& response) override; + + /** + * @brief recevice a 
response + * @param [out] response response + * @return PresenterErrorCode + */ + virtual PresenterErrorCode ReceiveMessage( + std::unique_ptr& response) override; + + /** + * @brief set InitChannelHandler + * @param [in] handler handler + */ + void SetInitChannelHandler(std::shared_ptr handler); + + /** + * @brief get InitChannelHandler + * @return InitChannelHandler + */ + std::shared_ptr GetInitChannelHandler(); + + /** + * @brief set description + * @param [in] desc description + */ + void SetDescription(const std::string& desc); + + /** + * @brief Get the description of the channel, can be used for logging + * @return description + */ + const std::string& GetDescription() const override; + + private: + /** + * @brief constructor + * @param [in] socket_factory socket factory + */ + DefaultChannel(std::shared_ptr socket_factory); + + /** + * @brief handle channel initialization process + */ + PresenterErrorCode HandleInitialization( + const google::protobuf::Message& message); + + /** + * @brief Start heartbeat thread + */ + void StartHeartbeatThread(); + + /** + * @brief Task to keep the channel alive + */ + void KeepAlive(); + + /** + * @brief Send heartbeat message to server + */ + void SendHeartbeat(); + + private: + std::shared_ptr socket_factory_; + std::shared_ptr init_channel_handler_; + std::unique_ptr conn_; + + // indicating whether the socket is valid + std::atomic_bool open_; + // indicating whether channel is valid + std::atomic_bool disposed_; + + std::mutex mtx_; + std::condition_variable cv_shutdown_; + std::unique_ptr heartbeat_thread_; + + std::string description_; +}; + +} /* namespace presenter */ +} /* namespace ascend */ + +#endif /* ASCENDDK_PRESENTER_AGENT_CHANNEL_DEFAULT_CHANNEL_H_ */ diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/codec/message_codec.cpp b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/codec/message_codec.cpp new file mode 100644 index 
0000000000000000000000000000000000000000..c895905813ed18d0c5f1732eaf701d70040aa2e2 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/codec/message_codec.cpp @@ -0,0 +1,214 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#include "ascenddk/presenter/agent/codec/message_codec.h" + +#include +#include +#include + +#include "ascenddk/presenter/agent/util/logging.h" + +using namespace google::protobuf; +using namespace google::protobuf::io; +using namespace std; + +namespace { +const int kMessageNameLengthSize = sizeof(uint8_t); + +// protobuf tag size +const int kTagSize = 1; + +// protobuf string/bytes wire type +const int kProtoStringWireType = 0x2; + +// max buffer size for varint32 +static const int kMaxVarint32Bytes = 5; + +// for calc tag +const int kTagShift = 3; + +// empty string +const string kEmptyStr = ""; +} + +namespace ascend { +namespace presenter { + +// make protobuf tag, tag should less than 15 +static uint8_t MakeTag(int tag) { + return static_cast(tag) << kTagShift | kProtoStringWireType; +} + +static string ConvertToVarint32(uint32_t value) { + // Zero-length field should not be serialized + if (value == 0) { + return kEmptyStr; + } + + char buf[kMaxVarint32Bytes]; + ArrayOutputStream arr(buf, kMaxVarint32Bytes); + CodedOutputStream os(&arr); + os.WriteVarint32(value); + // os.ByteCount() <= kMaxVarint32Bytes + return string(buf, os.ByteCount()); +} + +SharedByteBuffer MessageCodec::EncodeTagAndLength(const Tlv& tlv) { + string varlen = ConvertToVarint32(tlv.length); + if (varlen.empty()) { + AGENT_LOG_ERROR("length is 0"); + return SharedByteBuffer(); + } + + uint32_t size = kTagSize + varlen.size(); + SharedByteBuffer result = SharedByteBuffer::Make(size); + if (result.IsEmpty()) { + return result; + } + + ByteBufferWriter buffer(result.GetMutable(), size); + // put tag + buffer.PutUInt8(MakeTag(tlv.tag)); + // put var length + buffer.PutString(string(varlen.c_str(), varlen.size())); + return result; +} + +SharedByteBuffer MessageCodec::EncodeMessage( + const google::protobuf::Message& message) { + PartialMessageWithTlvs msg; + msg.message = &message; + return 
EncodeMessage(msg); +} + +SharedByteBuffer MessageCodec::EncodeMessage( + const PartialMessageWithTlvs& msg) { + if (msg.message == nullptr) { + return SharedByteBuffer(); + } + + const Message& message = *(msg.message); + vector tlv_list = msg.tlv_list; + + string name = message.GetDescriptor()->full_name(); + uint32_t msg_size = static_cast(message.ByteSize()); + uint8_t msg_name_size = static_cast(name.size()); + + // calc total size + uint32_t encode_size = kPacketLengthSize + kMessageNameLengthSize; + encode_size += msg_name_size + msg_size; + + uint32_t total_size = encode_size; + // if has additional field + if (!tlv_list.empty()) { + for (auto it = tlv_list.begin(); it != tlv_list.end(); ++it) { + string varlen = ConvertToVarint32(it->length); + total_size = total_size + kTagSize + varlen.size() + it->length; + } + } + + SharedByteBuffer encode_buffer = SharedByteBuffer::Make(encode_size); + if (encode_buffer.IsEmpty()) { + return encode_buffer; + } + + // serialize message + ByteBufferWriter buffer(encode_buffer.GetMutable(), encode_size); + buffer.PutUInt32(total_size); + buffer.PutUInt8(msg_name_size); + buffer.PutString(name); + // serialization may fail if any of the required field is not set, + // in which case, a empty buffer is returned + if (!buffer.PutMessage(message)) { + return SharedByteBuffer(); + } + + return encode_buffer; +} + +// Generate message prototype by name for parsing +static Message* NewMessageByName(const string& name) { + const Descriptor* descriptor = DescriptorPool::generated_pool() + ->FindMessageTypeByName(name); + if (descriptor != nullptr) { + const Message* prototype = + MessageFactory::generated_factory()->GetPrototype(descriptor); + if (descriptor != nullptr) { + return prototype->New(); + } + } + + return nullptr; +} + +Message* MessageCodec::DecodeMessage(const char* data, int size) { + if (size < kMessageNameLengthSize) { + AGENT_LOG_ERROR("Insufficient data for message name length field"); + return nullptr; + } + + 
// wrap message data with Reader + ByteBufferReader buffer(data, size); + + // read message name length + uint8_t msg_name_length = buffer.ReadUInt8(); + if (buffer.RemainingBytes() < msg_name_length) { + AGENT_LOG_ERROR( + "Insufficient data for name field, expect %d, but remain %d", + msg_name_length, buffer.RemainingBytes()); + return nullptr; + } + + // read message name + string name = buffer.ReadString(msg_name_length); + // get message prototype by name + Message* message = NewMessageByName(name); + if (message == nullptr) { + AGENT_LOG_ERROR("Unsupported message, name = %s", name.c_str()); + return nullptr; + } + + // parse message + if (!buffer.ReadMessage(buffer.RemainingBytes(), *message)) { + AGENT_LOG_ERROR("Failed to parse message, name = %s", name.c_str()); + delete message; + return nullptr; + } + + return message; +} + +} /* namespace presenter */ +} /* namespace ascend */ + diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/codec/message_codec.h b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/codec/message_codec.h new file mode 100644 index 0000000000000000000000000000000000000000..fa712cf7f4947009f86fceb54d9f919a1c28f4d3 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/codec/message_codec.h @@ -0,0 +1,101 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * ============================================================================ + */ + +#ifndef ASCENDDK_PRESENTER_AGENT_CODEC_MESSAGE_CODEC_H_ +#define ASCENDDK_PRESENTER_AGENT_CODEC_MESSAGE_CODEC_H_ + +#include +#include + +#include "ascenddk/presenter/agent/channel.h" +#include "ascenddk/presenter/agent/util/byte_buffer.h" + +namespace ascend { +namespace presenter { + +/** + * MessageCodec for encoding and decoding message + * + * A message has the following structure + * -------------------------------------------------------------------- + * |Field Name | Size(bytes) | Type | + * -------------------------------------------------------------------- + * |total message len | 4 | uint32 | + * |------------------------------------------------------------------- + * |message name len | 1 | uint8 | + * |------------------------------------------------------------------- + * |message name | Var. 
max 255 | String, NO terminated '\0' | + * |------------------------------------------------------------------- + * |message body | Var. | Bytes. Encoded by protobuf | + * -------------------------------------------------------------------- + */ +class MessageCodec { + public: + // size of channel message total length + static const int kPacketLengthSize = sizeof(uint32_t); + + /** + * @brief Encode the message to a ByteBuffer + * @param [in] message message + * @return ByteBuffer. Empty if encode failed + */ + SharedByteBuffer EncodeMessage(const google::protobuf::Message& message); + + /** + * @brief Encode the message to a ByteBuffer + * @param [in] message message + * @return ByteBuffer. Empty if encode failed + */ + SharedByteBuffer EncodeMessage(const PartialMessageWithTlvs& message); + + /** + * @brief Encode the tag and length to a ByteBuffer + * @param [in] Tlv Tlv + * @return ByteBuffer. Empty if encode failed + */ + SharedByteBuffer EncodeTagAndLength(const Tlv& tlv); + + /** + * @brief Decode the message from buffer + * @param [in] data data buffer + * @param [in] size data size + * @return Message. NULL if decode failed + */ + google::protobuf::Message* DecodeMessage(const char* data, int size); + +}; + +} /* namespace presenter */ +} /* namespace ascend */ + +#endif /* ASCENDDK_PRESENTER_AGENT_CODEC_MESSAGE_CODEC_H_ */ diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/connection/connection.cpp b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/connection/connection.cpp new file mode 100644 index 0000000000000000000000000000000000000000..5515a37da044f80b7f0b7349c77fc9c07e3067b2 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/connection/connection.cpp @@ -0,0 +1,192 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#include "ascenddk/presenter/agent/connection/connection.h" + +#include +#include + +#include "ascenddk/presenter/agent/codec/message_codec.h" +#include "ascenddk/presenter/agent/net/raw_socket_factory.h" +#include "ascenddk/presenter/agent/util/byte_buffer.h" +#include "ascenddk/presenter/agent/util/logging.h" +#include "ascenddk/presenter/agent/util/mem_utils.h" + + +namespace { + const uint32_t kMaxPacketSize = 1024 * 1024 * 10; //10MB +} + +namespace ascend { +namespace presenter { + +using google::protobuf::Message; +using namespace std; + +Connection::Connection(Socket* socket) + : socket_(socket) { +} + +Connection* Connection::New(Socket* socket) { + if (socket == nullptr) { + AGENT_LOG_ERROR("socket is null"); + return nullptr; + } + + return new (nothrow) Connection(socket); +} + +PresenterErrorCode Connection::SendTlvList(const std::vector& tlv_list) { + if (tlv_list.empty()) { + return PresenterErrorCode::kNone; + } + + for (auto it = tlv_list.begin(); it != tlv_list.end(); ++it) { + SharedByteBuffer tlv_buf = codec_.EncodeTagAndLength(*it); + if (tlv_buf.IsEmpty()) { + AGENT_LOG_ERROR("Failed to encode TLV"); + return PresenterErrorCode::kCodec; + } + + //send tag and length + PresenterErrorCode error_code = socket_->Send(tlv_buf.Get(), + tlv_buf.Size()); + if (error_code != PresenterErrorCode::kNone) { + AGENT_LOG_ERROR("Failed to send TLV tag and length"); + return error_code; + } + + //send value + error_code = socket_->Send(it->value, it->length); + if (error_code != PresenterErrorCode::kNone) { + AGENT_LOG_ERROR("Failed to send TLV value"); + return error_code; + } + } + + return PresenterErrorCode::kNone; +} + +PresenterErrorCode Connection::SendMessage( + const PartialMessageWithTlvs& proto_message) { + if (proto_message.message == nullptr) { + AGENT_LOG_ERROR("message is null"); + return PresenterErrorCode::kInvalidParam; + } + // lock for encoding and 
sending + unique_lock lock(mtx_); + + const char* msg_name = proto_message.message->GetDescriptor()->name().c_str(); + SharedByteBuffer buffer = codec_.EncodeMessage(proto_message); + if (buffer.IsEmpty()) { + AGENT_LOG_ERROR("Failed to encode message: %s", msg_name); + return PresenterErrorCode::kCodec; + } + + // send message + PresenterErrorCode error_code = socket_->Send(buffer.Get(), buffer.Size()); + // if send success and has more to send.. + if (error_code != PresenterErrorCode::kNone) { + AGENT_LOG_ERROR("Failed to send message: %s", msg_name); + return error_code; + } + + return SendTlvList(proto_message.tlv_list); +} + +PresenterErrorCode Connection::SendMessage(const Message& message) { + PartialMessageWithTlvs msg; + msg.message = &message; + return SendMessage(msg); +} + +PresenterErrorCode Connection::ReceiveMessage( + unique_ptr<::google::protobuf::Message>& message) { + // read 4 bytes header + char *buf = recv_buf_; + PresenterErrorCode error_code = socket_->Recv( + buf, MessageCodec::kPacketLengthSize); + + if (error_code == PresenterErrorCode::kSocketTimeout) { + AGENT_LOG_INFO("Read message header timeout"); + return PresenterErrorCode::kSocketTimeout; + } + + if (error_code != PresenterErrorCode::kNone) { + AGENT_LOG_ERROR("Failed to read message header"); + return error_code; + } + + // parse length + uint32_t total_size = ntohl(*((uint32_t*) buf)); + + // read the remaining data + uint32_t remaining_size = total_size - MessageCodec::kPacketLengthSize; + if (remaining_size == 0 || remaining_size > kMaxPacketSize) { + AGENT_LOG_ERROR("received malformed message, size field = %u", total_size); + return PresenterErrorCode::kCodec; + } + + int pack_size = static_cast(remaining_size); + unique_ptr unique_buf; // ensure release allocated buffer + if (remaining_size > kBufferSize) { + buf = memutils::NewArray(remaining_size); + if (buf == nullptr) { + return PresenterErrorCode::kBadAlloc; + } + + unique_buf.reset(buf); // take ownership of the buffer used below (was a second, leaked allocation) 
+ } + + // packSize must be within [1, MAX_PACKET_SIZE], + // Recv() can not cause buffer overflow + error_code = socket_->Recv(buf, pack_size); + if (error_code != PresenterErrorCode::kNone) { + AGENT_LOG_ERROR("Failed to read whole message"); + return PresenterErrorCode::kConnection; + } + + // Decode message + Message* msg = codec_.DecodeMessage(buf, pack_size); + if (msg == nullptr) { + return PresenterErrorCode::kCodec; + } + + message.reset(msg); + string name = message->GetDescriptor()->name(); + AGENT_LOG_DEBUG("Message received, name = %s", name.c_str()); + return PresenterErrorCode::kNone; +} + +} /* namespace presenter */ +} /* namespace ascend */ diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/connection/connection.h b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/connection/connection.h new file mode 100644 index 0000000000000000000000000000000000000000..7f4a1c7df13a78102a761b3607135dfbf6c3c696 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/connection/connection.h @@ -0,0 +1,112 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * ============================================================================ + */ + +#ifndef ASCENDDK_PRESENTER_AGENT_CONNECTION_CONNECTION_H_ +#define ASCENDDK_PRESENTER_AGENT_CONNECTION_CONNECTION_H_ + +#include +#include +#include +#include +#include +#include +#include + +#include "ascenddk/presenter/agent/codec/message_codec.h" +#include "ascenddk/presenter/agent/errors.h" +#include "ascenddk/presenter/agent/net/socket_factory.h" + +namespace ascend { +namespace presenter { + +/** + * Connection between agent and server + * provide protobuf based interface + */ +class Connection { + public: + static Connection* New(Socket* socket); + ~Connection() = default; + + /** + * @brief Send a protobuf Message to presenter server + * @param [in] message protobuf message + * @return PresenterErrorCode + */ + PresenterErrorCode SendMessage(const ::google::protobuf::Message& message); + + /** + * @brief Send a Message to presenter server + * @param [in] message PartialMessageWithTlvs + * @return PresenterErrorCode + */ + PresenterErrorCode SendMessage(const PartialMessageWithTlvs& message); + + /** + * @brief Receive a message from presenter server + * @param [out] message 
response message + * @return PresenterErrorCode + */ + PresenterErrorCode ReceiveMessage( + std::unique_ptr<::google::protobuf::Message>& message); + + private: + PresenterErrorCode DoSendMessage(const ::google::protobuf::Message& message, + const std::vector& tlv_list); + + private: + Connection(Socket* socket); + + /** + * @brief Send tlv in protobuf format to server + * @param [out] message response message + * @return PresenterErrorCode + */ + PresenterErrorCode SendTlvList(const std::vector& tlv_list); + + // max size of received message + static const int kBufferSize = 1024; + + std::unique_ptr socket_; + + char recv_buf_[kBufferSize] = { 0 }; + + std::mutex mtx_; + + MessageCodec codec_; +}; + +} /* namespace presenter */ +} /* namespace ascend */ + +#endif /* ASCENDDK_PRESENTER_AGENT_CONNECTION_CONNECTION_H_ */ diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/raw_socket.cpp b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/raw_socket.cpp new file mode 100644 index 0000000000000000000000000000000000000000..16f84c71617cbab23de8c8d5a1685449bf6daeb6 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/raw_socket.cpp @@ -0,0 +1,63 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#include "ascenddk/presenter/agent/net/raw_socket.h" + +#include "ascenddk/presenter/agent/util/logging.h" +#include "ascenddk/presenter/agent/util/socket_utils.h" + +namespace ascend { +namespace presenter { + +RawSocket* RawSocket::New(int socket) { + return new (std::nothrow) RawSocket(socket); +} + +RawSocket::RawSocket(int socket) + : socket_(socket) { +} + +RawSocket::~RawSocket() { + socketutils::CloseSocket(socket_); +} + +int RawSocket::DoSend(const char* data, int size) { + return socketutils::WriteN(socket_, data, size); +} + +int RawSocket::DoRecv(char* buf, int size) { + return socketutils::ReadN(socket_, buf, size); +} + +} /* namespace presenter */ +} /* namespace ascend */ diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/raw_socket.h b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/raw_socket.h new file mode 100644 index 0000000000000000000000000000000000000000..af96248ff75ac987596507ba06a146f23c156f57 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/raw_socket.h @@ -0,0 +1,94 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#ifndef ASCENDDK_PRESENTER_AGENT_NET_RAW_SOCKET_H_ +#define ASCENDDK_PRESENTER_AGENT_NET_RAW_SOCKET_H_ + +#include "ascenddk/presenter/agent/errors.h" +#include "ascenddk/presenter/agent/net/socket.h" + +namespace ascend { +namespace presenter { + +/** + * RawSocket, the data is not encrypted + */ +class RawSocket : public Socket { + public: + /** + * @brief Factory method + * @param [in] socket socket file descriptor + */ + static RawSocket* New(int socket); + + /** + * @brief Constructor + * @param [in] socket socket file descriptor + */ + explicit RawSocket(int socket); + + // Disable copy constructor and assignment operator + RawSocket(const RawSocket& other) = delete; + RawSocket& operator=(const RawSocket& other) = delete; + + /** + * @brief Destructor + */ + virtual ~RawSocket(); + + protected: + + /** + * @brief Read bytes from socket + * @param [in] buffer receive buffer + * @param [in] size expected bytes + * @return bytes received. -1 of read failed + */ + virtual int DoRecv(char *buffer, int size) override; + + /** + * @brief Write bytes to socket + * @param [in] data bytes to send + * @param [in] size size of data + * @return bytes sent. 
-1 if send failed + */ + virtual int DoSend(const char *data, int size) override; + + private: + int socket_; +}; + +} /* namespace presenter */ +} /* namespace ascend */ + +#endif /* ASCENDDK_PRESENTER_AGENT_NET_RAW_SOCKET_H_ */ diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/raw_socket_factory.cpp b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/raw_socket_factory.cpp new file mode 100644 index 0000000000000000000000000000000000000000..e2848caa14b82f07bd8e62d63007ae7166a7e119 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/raw_socket_factory.cpp @@ -0,0 +1,72 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * ============================================================================ + */ + +#include "ascenddk/presenter/agent/net/raw_socket_factory.h" + +#include +#include +#include + +#include "ascenddk/presenter/agent/util/logging.h" +#include "ascenddk/presenter/agent/util/socket_utils.h" + +using std::string; + +namespace ascend { +namespace presenter { + +RawSocketFactory::RawSocketFactory(const string& host_ip, uint16_t port) + : host_ip_(host_ip), + port_(port) { +} + +// overrided method of Create() +RawSocket* RawSocketFactory::Create() { + // create a socket and connect to server + int sock = CreateSocket(host_ip_, port_); + if (sock == socketutils::kSocketError) { + return nullptr; + } + + // No error, create RawSocket and return + RawSocket *ret = RawSocket::New(sock); + if (ret == nullptr) { + (void) close(sock); + SetErrorCode(PresenterErrorCode::kBadAlloc); + } + + return ret; +} + +} /* namespace presenter */ +} /* namespace ascend */ diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/raw_socket_factory.h b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/raw_socket_factory.h new file mode 100644 index 0000000000000000000000000000000000000000..5c58f2163ffedd35cbad7976e21da3c495a4834c --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/raw_socket_factory.h @@ -0,0 +1,72 @@ +/** + * 
============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ +#ifndef ASCENDDK_PRESENTER_AGENT_NET_RAW_SOCKET_FACTORY_H_ +#define ASCENDDK_PRESENTER_AGENT_NET_RAW_SOCKET_FACTORY_H_ + +#include +#include + +#include "ascenddk/presenter/agent/net/raw_socket.h" +#include "ascenddk/presenter/agent/net/socket_factory.h" + +namespace ascend { +namespace presenter { + +/** + * Factory of RawSocket + */ +class RawSocketFactory : public SocketFactory { + public: + /** + * @brief Constructor + * @param hostIp host IP + * @param port port + */ + RawSocketFactory(const std::string& host_ip, uint16_t port); + + /** + * @brief Create instance of RawSocket, If NULL is returned, + * Invoke GetErrorCode() for error code + * @return pointer of RawSocket + */ + virtual RawSocket* Create() override; + + private: + std::string host_ip_; + uint16_t port_; +}; + +} /* namespace presenter */ +} /* namespace ascend */ + +#endif /* ASCENDDK_PRESENTER_AGENT_NET_RAW_SOCKET_FACTORY_H_ */ diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/socket.cpp b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/socket.cpp new file mode 100644 index 0000000000000000000000000000000000000000..a97bfcbef7f18e8733d201d44308cff6ab3f4fa4 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/socket.cpp @@ -0,0 +1,80 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. 
+ * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#include "ascenddk/presenter/agent/net/socket.h" + +#include "ascenddk/presenter/agent/util/logging.h" +#include "ascenddk/presenter/agent/util/socket_utils.h" + +namespace ascend { +namespace presenter { + +PresenterErrorCode Socket::Send(const char *data, int size) { + int ret = DoSend(data, size); + if (ret == socketutils::kSocketError) { + return PresenterErrorCode::kConnection; + } + + // check size of sent data + if (ret < size) { + AGENT_LOG_ERROR("Socket::Send() error, expect %d bytes, but sent %d", size, + ret); + return PresenterErrorCode::kConnection; + } + + AGENT_LOG_DEBUG("Socket::Send() succeeded, size = %d", size); + return PresenterErrorCode::kNone; +} + +PresenterErrorCode Socket::Recv(char *buffer, int size) { + int ret = DoRecv(buffer, size); + if (ret == socketutils::kSocketError) { + return PresenterErrorCode::kConnection; + } else if (ret == socketutils::kSocketTimeout) { + return PresenterErrorCode::kSocketTimeout; + } + + // check size of received data + if (ret < size) { + AGENT_LOG_ERROR("Socket::Recv() error, expect %d bytes, but received %d", + size, ret); + return PresenterErrorCode::kConnection; + } + + AGENT_LOG_DEBUG("Socket::Recv() succeeded, size = %d", size); + return PresenterErrorCode::kNone; +} + +} /* namespace presenter */ +} /* namespace ascend */ + diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/socket.h b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/socket.h new file mode 100644 index 0000000000000000000000000000000000000000..6818697f786b7351546707fb8f4ed40ad11c5e27 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/socket.h @@ -0,0 +1,97 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#ifndef ASCENDDK_PRESENTER_AGENT_NET_SOCKET_H_ +#define ASCENDDK_PRESENTER_AGENT_NET_SOCKET_H_ + +#include +#include + +#include "ascenddk/presenter/agent/errors.h" + +namespace ascend { +namespace presenter { + +/** + * Abstract Socket Class + * Subclasses can override protected method to implement socket with SSL + */ +class Socket { + public: + Socket() = default; + virtual ~Socket() = default; + + // Disable copy constructor and assignment operator + Socket(const Socket& other) = delete; + Socket& operator=(const Socket& other) = delete; + + /** + * @brief Read bytes from socket + * @param [in] buffer receive buffer + * @param [in] size expected bytes + * @return PresenterErrorCode + */ + PresenterErrorCode Send(const char *data, int size); + + /** + * @brief Write bytes to socket + * @param [in] data bytes to send + * @param [in] size size of data + * @return PresenterErrorCode + */ + PresenterErrorCode Recv(char *buf, int size); + + protected: + + /** + * @brief Read bytes from socket + * @param [in] buffer receive buffer + * @param [in] size expected bytes + * @return bytes received + */ + virtual int DoRecv(char *buffer, int size) = 0; + + /** + * @brief Write bytes to socket + * @param [in] data bytes to send + * @param [in] size size of data + * @return bytes sent + */ + virtual int DoSend(const char *data, int size) = 0; + +}; + +} /* namespace presenter */ +} /* namespace ascend */ + +#endif /* ASCENDDK_PRESENTER_AGENT_NET_SOCKET_H_ */ diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/socket_factory.cpp b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/socket_factory.cpp new file mode 100644 index 0000000000000000000000000000000000000000..44267793bd31cdc3deac0730587b783d3cfc716f --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/socket_factory.cpp @@ -0,0 +1,114 @@ +/** + * 
============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#include "ascenddk/presenter/agent/net/socket_factory.h" + +#include +#include +#include +#include +#include + +#include "ascenddk/presenter/agent/errors.h" +#include "ascenddk/presenter/agent/util/logging.h" +#include "ascenddk/presenter/agent/util/socket_utils.h" + +using namespace std; + +namespace ascend { +namespace presenter { + +// anonymous namespace for constants +namespace { + +// Default Socket Timeout +const int kDefaultTimeoutInSec = 3; + +} /* anonymous namespace */ + +PresenterErrorCode SocketFactory::GetErrorCode() const { + return error_code_; +} + +void SocketFactory::SetErrorCode(PresenterErrorCode error_code) { + this->error_code_ = error_code; +} + +// common function for creating a socket with given hostIp and port +int SocketFactory::CreateSocket(const string& host_ip, uint16_t port) { + // parse address + sockaddr_in addr; + if (!socketutils::SetSockAddr(host_ip.c_str(), port, addr)) { + AGENT_LOG_ERROR("Invalid address: %s:%d", host_ip.c_str(), port); + SetErrorCode(PresenterErrorCode::kInvalidParam); + return socketutils::kSocketError; + } + + // create socket file descriptor + int sock = socketutils::CreateSocket(); + if (sock == socketutils::kSocketError) { + AGENT_LOG_ERROR("socket() error: %s", strerror(errno)); + SetErrorCode(PresenterErrorCode::kConnection); + return socketutils::kSocketError; + } + + // reuse address + socketutils::SetSocketReuseAddr(sock); + + // set timeout + socketutils::SetSocketTimeout(sock, kDefaultTimeoutInSec); + + // do connect + if (socketutils::Connect(sock, addr) == socketutils::kSocketError) { + if (errno == EINVAL) { + SetErrorCode(PresenterErrorCode::kInvalidParam); + } else { + SetErrorCode(PresenterErrorCode::kConnection); + } + + AGENT_LOG_ERROR("Failed to connect to server: %s:%u", host_ip.c_str(), port); + + // connect failed, close socket + (void) close(sock); + return socketutils::kSocketError; + } + + // 
connect successfully + SetErrorCode(PresenterErrorCode::kNone); + AGENT_LOG_INFO("Connected to server %s:%d, socket file descriptor = %d", + host_ip.c_str(), port, sock); + return sock; +} + +} /* namespace presenter */ +} /* namespace ascend */ diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/socket_factory.h b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/socket_factory.h new file mode 100644 index 0000000000000000000000000000000000000000..100cde9ef563bcd5cde66ff9f31c3be9f9250a23 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/net/socket_factory.h @@ -0,0 +1,94 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * ============================================================================ + */ + +#ifndef ASCENDDK_PRESENTER_AGENT_NET_SOCKET_FACTORY_H_ +#define ASCENDDK_PRESENTER_AGENT_NET_SOCKET_FACTORY_H_ + +#include "ascenddk/presenter/agent/net/socket.h" + +#include +#include + + +namespace ascend { +namespace presenter { + +/** + * Abstract SocketFactory for creating Socket + * Subclasses implement Create() to return concrete instance + */ +class SocketFactory { + public: + + /** + * Destructor + */ + virtual ~SocketFactory() = default; + + /** + * @brief Create instance of Socket, If NULL is returned, + * invoke GetErrorCode() for error code + * @return pointer of Socket + */ + virtual Socket* Create() = 0; + + /** + * @brief Get error code + */ + PresenterErrorCode GetErrorCode() const; + + protected: + + /** + * @brief create a socket and connect to server + * @param [in] host_ip host IP + * @param [in] port port + * @return socket file descriptor, if SOCKET_ERROR(-1) is returned, + * invoke GetErrorCode() for error code + */ + int CreateSocket(const std::string& host_ip, std::uint16_t port); + + /** + * @brief Set error code + * @param[in] error_code error code + */ + void SetErrorCode(PresenterErrorCode error_code); + + private: + PresenterErrorCode error_code_ = PresenterErrorCode::kNone; +}; + +} +} + +#endif /* ASCENDDK_PRESENTER_AGENT_NET_SOCKET_FACTORY_H_ */ diff --git 
a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/presenter/presenter_channel.cpp b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/presenter/presenter_channel.cpp new file mode 100644 index 0000000000000000000000000000000000000000..2056a18b6022d8ecccd26bbd9f4a4fafd614ea03 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/presenter/presenter_channel.cpp @@ -0,0 +1,197 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * ============================================================================ + */ + +#include "ascenddk/presenter/agent/presenter_channel.h" + +#include +#include + +#include "ascenddk/presenter/agent/channel/default_channel.h" +#include "ascenddk/presenter/agent/net/raw_socket_factory.h" +#include "ascenddk/presenter/agent/presenter/presenter_channel_init_handler.h" +#include "ascenddk/presenter/agent/presenter/presenter_message_helper.h" +#include "ascenddk/presenter/agent/util/logging.h" +#include "ascenddk/presenter/agent/util/parse_config.h" + +using namespace std; +using namespace google::protobuf; + +namespace ascend { +namespace presenter { + +PresenterErrorCode CreateChannel(Channel *&channel, + const OpenChannelParam ¶m) { + std::shared_ptr handler = make_shared< + PresentChannelInitHandler>(param); + + DefaultChannel *ch = DefaultChannel::NewChannel(param.host_ip, param.port, handler); + if (ch == nullptr) { + AGENT_LOG_ERROR("Channel new() error"); + return PresenterErrorCode::kBadAlloc; + } + + // OpenChannelParam to string + std::stringstream ss; + ss << "PresenterChannelImpl: {"; + ss << "server: " << param.host_ip << ":" << param.port; + ss << ", channel: " << param.channel_name; + ss << ", content_type: " << static_cast(param.content_type); + ss << "}"; + ch->SetDescription(ss.str()); + channel = ch; + return PresenterErrorCode::kNone; +} + +PresenterErrorCode OpenChannel(Channel *&channel, + const 
OpenChannelParam ¶m) { + + // If the channel is not NULL, we cannot know whether it is actually + // point to something. We cannot be sure whether it is safe to simply + // delete that, so a kPresenterErrorInvalidParams will be returned + if (channel != nullptr) { + AGENT_LOG_ERROR("channel is not NULL"); + return PresenterErrorCode::kInvalidParam; + } + + // allocate channel object + PresenterErrorCode error_code = CreateChannel(channel, param); + if (error_code != PresenterErrorCode::kNone) { + return error_code; + } + + string channelDesc = channel->GetDescription(); + + // Try Open Channel + AGENT_LOG_INFO("To Open channel: %s", channelDesc.c_str()); + + error_code = channel->Open(); + + // If failed, the channel object need to be released + if (error_code != PresenterErrorCode::kNone) { + if (error_code == PresenterErrorCode::kAppDefinedError) { + DefaultChannel *ch = dynamic_cast(channel); + error_code = dynamic_pointer_cast( + ch->GetInitChannelHandler())->GetErrorCode(); + } + + AGENT_LOG_ERROR("OpenChannel Failed, channel = %s, error_code = %d", + channelDesc.c_str(), error_code); + delete channel; + channel = nullptr; + return error_code; + } + + AGENT_LOG_INFO("Channel opened, channel = %s", channelDesc.c_str()); + return PresenterErrorCode::kNone; +} + +PresenterErrorCode OpenChannelByConfig(Channel*& channel, + const char* configFile) { + map config; + ReadConfig(config, configFile); + + OpenChannelParam param; + map::const_iterator mIter = config.begin(); + for (; mIter != config.end(); ++mIter) { + if (mIter->first == "presenter_server_ip") { + param.host_ip = mIter->second; + AGENT_LOG_INFO("presenter_server_ip config string:%s", mIter->second.c_str()); + } + else if (mIter->first == "presenter_server_port") { + param.port = std::stoi(mIter->second); + AGENT_LOG_INFO("presenter_server_port config string:%s", mIter->second.c_str()); + } + else if (mIter->first == "channel_name") { + param.channel_name = mIter->second; + AGENT_LOG_INFO("channel_name 
config string:%s", mIter->second.c_str()); + } + else if (mIter->first == "content_type") { + param.content_type = static_cast(std::stoi(mIter->second)); + AGENT_LOG_INFO("content_type config string:%s", mIter->second.c_str()); + printf("content_type config string:%s\n", mIter->second.c_str()); + } + } + + return OpenChannel(channel, param); +} + +PresenterErrorCode PresentImage(Channel *channel, const ImageFrame &image) { + if (channel == nullptr) { + AGENT_LOG_ERROR("channel is NULL"); + return PresenterErrorCode::kInvalidParam; + } + + proto::PresentImageRequest req; + if (!PresenterMessageHelper::InitPresentImageRequest(req, image)) { + return PresenterErrorCode::kInvalidParam; + } + + Tlv tlv; + tlv.tag = proto::PresentImageRequest::kDataFieldNumber; + tlv.length = image.size; + tlv.value = reinterpret_cast(image.data); + + PartialMessageWithTlvs message; + message.message = &req; + message.tlv_list.push_back(tlv); + + std::unique_ptr recv_message; + PresenterErrorCode error_code = channel->SendMessage(message, recv_message); + if (error_code != PresenterErrorCode::kNone) { + AGENT_LOG_ERROR("Failed to present image, error = %d", error_code); + return error_code; + } + + return PresenterMessageHelper::CheckPresentImageResponse(*recv_message); +} + +PresenterErrorCode SendMessage( + Channel *channel, const google::protobuf::Message& message) { + if (channel == nullptr) { + AGENT_LOG_ERROR("channel is NULL"); + return PresenterErrorCode::kInvalidParam; + } + + unique_ptr resp; + PresenterErrorCode error_code = channel->SendMessage(message, resp); + if (error_code != PresenterErrorCode::kNone) { + AGENT_LOG_ERROR("Failed to present image, error = %d", error_code); + return error_code; + } + + return PresenterMessageHelper::CheckPresentImageResponse(*resp); +} + +} +} + diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/presenter/presenter_channel_init_handler.cpp 
b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/presenter/presenter_channel_init_handler.cpp new file mode 100644 index 0000000000000000000000000000000000000000..5f45a60fb6122f47a39bebdf535c5ac25fa4a5e7 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/presenter/presenter_channel_init_handler.cpp @@ -0,0 +1,78 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * ============================================================================ + */ + +#include "ascenddk/presenter/agent/presenter/presenter_channel_init_handler.h" + +#include "ascenddk/presenter/agent/presenter/presenter_message_helper.h" +#include "ascenddk/presenter/agent/util/logging.h" + +using google::protobuf::Message; + +namespace ascend { +namespace presenter { + +PresentChannelInitHandler::PresentChannelInitHandler( + const OpenChannelParam& param) + : param_(param) { +} + +google::protobuf::Message* PresentChannelInitHandler::CreateInitRequest() { + proto::OpenChannelRequest *req = + new (std::nothrow) proto::OpenChannelRequest(); + if (req != nullptr) { + error_code_ = PresenterMessageHelper::CreateOpenChannelRequest( + *req, param_.channel_name, param_.content_type); + + if (error_code_ != PresenterErrorCode::kNone) { + delete req; + return nullptr; + } + } + + return req; +} + +bool PresentChannelInitHandler::CheckInitResponse(const Message& response) { + error_code_ = PresenterMessageHelper::CheckOpenChannelResponse(response); + if (error_code_ != PresenterErrorCode::kNone) { + AGENT_LOG_ERROR("OpenChannel failed, error = %d", error_code_); + } + return error_code_ == PresenterErrorCode::kNone; +} + +PresenterErrorCode PresentChannelInitHandler::GetErrorCode() const { + return error_code_; +} + +} /* namespace presenter */ +} /* namespace ascend */ diff --git 
a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/presenter/presenter_channel_init_handler.h b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/presenter/presenter_channel_init_handler.h new file mode 100644 index 0000000000000000000000000000000000000000..f9472cc6dfe334e7f7f53075b759b10a20d1d742 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/presenter/presenter_channel_init_handler.h @@ -0,0 +1,84 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * ============================================================================ + */ + +#ifndef ASCENDDK_PRESENTER_AGENT_PRESENTER_PRESENTER_CHANNEL_INIT_HANDLER_H_ +#define ASCENDDK_PRESENTER_AGENT_PRESENTER_PRESENTER_CHANNEL_INIT_HANDLER_H_ + +#include + +#include "ascenddk/presenter/agent/channel.h" +#include "ascenddk/presenter/agent/errors.h" +#include "ascenddk/presenter/agent/presenter_types.h" + +namespace ascend { +namespace presenter { + +/** + * Presenter Channel Init Handler + */ +class PresentChannelInitHandler : public InitChannelHandler { + public: + /** + * @brief Constructor + * @param [in] param Open channel parameter + */ + PresentChannelInitHandler(const OpenChannelParam& param); + + /** + * @brief Create OpenChannelRequest + * @return OpenChannelRequest + */ + google::protobuf::Message* CreateInitRequest() override; + + /** + * @brief Check OpenChannelResponse + * @param [in] response response + * @return check result + */ + bool CheckInitResponse(const google::protobuf::Message& response) override; + + /** + * @brief Get ErrorCode + * @return PresenterErrorCode + */ + PresenterErrorCode GetErrorCode() const; + + private: + OpenChannelParam param_; + PresenterErrorCode error_code_ = PresenterErrorCode::kOther; +}; + +} /* namespace presenter */ +} /* namespace ascend */ + +#endif /* ASCENDDK_PRESENTER_AGENT_PRESENTER_PRESENTER_CHANNEL_INIT_HANDLER_H_ */ diff --git 
a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/presenter/presenter_message_helper.cpp b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/presenter/presenter_message_helper.cpp new file mode 100644 index 0000000000000000000000000000000000000000..10173ab037fe00b33cfa585a03f25118a8bd4cd6 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/presenter/presenter_message_helper.cpp @@ -0,0 +1,170 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * ============================================================================ + */ + +#include "ascenddk/presenter/agent/presenter/presenter_message_helper.h" + +#include "ascenddk/presenter/agent/util/logging.h" + +using std::string; + +namespace ascend { +namespace presenter { + +PresenterErrorCode PresenterMessageHelper::CreateOpenChannelRequest( + proto::OpenChannelRequest& request, const string& channel_name, + ContentType content_type) { + // check channel name + if (channel_name.empty()) { + AGENT_LOG_ERROR("channel name is empty"); + return PresenterErrorCode::kInvalidParam; + } + + request.set_channel_name(channel_name); + + // set content type + if (content_type == ContentType::kImage) { + request.set_content_type(proto::kChannelContentTypeImage); + } else if (content_type == ContentType::kVideo) { + request.set_content_type(proto::kChannelContentTypeVideo); + } else { + AGENT_LOG_ERROR("Unsupported content type: %d", content_type); + return PresenterErrorCode::kInvalidParam; + } + + return PresenterErrorCode::kNone; +} + +bool PresenterMessageHelper::InitPresentImageRequest( + proto::PresentImageRequest& request, const ImageFrame& image) { + if (image.format == ImageFormat::kJpeg) { + request.set_format(proto::kImageFormatJpeg); + } else { // other formats is not supported + AGENT_LOG_ERROR("Unsupported image format: %d", image.format); + return false; + } + + // data can not be NULL + if (image.data == nullptr) { + 
AGENT_LOG_ERROR("Image data is NULL"); + return false; + } + + // size should greater than 0 + if (image.size == 0) { + AGENT_LOG_ERROR("Image data size is 0"); + return false; + } + + request.set_width(image.width); + request.set_height(image.height); + + // set the rectangle attr + proto::Rectangle_Attr *rectangle_attr = nullptr; + for (uint32_t i = 0; i < image.detection_results.size(); i++) + { + rectangle_attr = request.add_rectangle_list(); + rectangle_attr->mutable_left_top()-> set_x(image.detection_results[i].lt.x); + rectangle_attr->mutable_left_top()-> set_y(image.detection_results[i].lt.y); + rectangle_attr->mutable_right_bottom()->set_x(image.detection_results[i].rb.x); + rectangle_attr->mutable_right_bottom()->set_y(image.detection_results[i].rb.y); + rectangle_attr->set_label_text(image.detection_results[i].result_text); + } + // image.data may be too large to affect performance, so it is not set here + return true; +} + +PresenterErrorCode PresenterMessageHelper::TranslateErrorCode( + proto::OpenChannelErrorCode error_code) { + switch (error_code) { + case proto::kOpenChannelErrorNone: + return PresenterErrorCode::kNone; + case proto::kOpenChannelErrorChannelAlreadyOpened: + return PresenterErrorCode::kChannelAlreadyOpened; + case proto::kOpenChannelErrorNoSuchChannel: + return PresenterErrorCode::kNoSuchChannel; + default: + return PresenterErrorCode::kServerReturnedUnknownError; + } +} + +PresenterErrorCode PresenterMessageHelper::CheckOpenChannelResponse( + const ::google::protobuf::Message& msg) { + + string msg_name = msg.GetDescriptor()->full_name(); + // check response + if (msg_name != proto::OpenChannelResponse::descriptor()->full_name()) { + AGENT_LOG_ERROR("expecting OpenChannelResponse, but received %s", + msg_name.c_str()); + return PresenterErrorCode::kOther; + } + + const proto::OpenChannelResponse& resp = + static_cast(msg); + return TranslateErrorCode(resp.error_code()); +} + +PresenterErrorCode 
PresenterMessageHelper::TranslateErrorCode( + proto::PresentDataErrorCode error_code) { + if (error_code == proto::kPresentDataErrorNone) { + return PresenterErrorCode::kNone; + } + + AGENT_LOG_ERROR("Present Image failed. error code = %d", error_code); + + if (error_code == proto::kPresentDataErrorUnsupportedFormat || + error_code == proto::kPresentDataErrorUnsupportedType) { + return PresenterErrorCode::kInvalidParam; + } + + return PresenterErrorCode::kServerReturnedUnknownError; +} + +PresenterErrorCode PresenterMessageHelper::CheckPresentImageResponse( + const ::google::protobuf::Message& msg) { + // check response + string msg_name = msg.GetDescriptor()->full_name(); + + // if the received message is not of the desired type + if (msg_name != proto::PresentImageResponse::descriptor()->full_name()) { + AGENT_LOG_ERROR("expecting PresentImageResponse, but received %s", + msg_name.c_str()); + return PresenterErrorCode::kOther; + } + + const proto::PresentImageResponse& resp = + static_cast(msg); + return TranslateErrorCode(resp.error_code()); +} + +} /* namespace presenter */ +} /* namespace ascend */ diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/presenter/presenter_message_helper.h b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/presenter/presenter_message_helper.h new file mode 100644 index 0000000000000000000000000000000000000000..fab2144ee7eeb55b5bcebcc4d586d6a1fd774ee5 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/presenter/presenter_message_helper.h @@ -0,0 +1,113 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#ifndef SRC_CHANNEL_PRESENTERMESSAGEHELPER_H_ +#define SRC_CHANNEL_PRESENTERMESSAGEHELPER_H_ + +#include + +#include "ascenddk/presenter/agent/errors.h" +#include "ascenddk/presenter/agent/presenter_types.h" +#include "proto/presenter_message.pb.h" + +namespace ascend { +namespace presenter { + +/** + * Helper class for Presenter Messages + */ +class PresenterMessageHelper { + public: + // helper class, constructor/destructor is not needed + PresenterMessageHelper() = delete; + ~PresenterMessageHelper() = delete; + + /** + * @brief create OpenChannelRequest + * @param [out] request request to set the properties + * @param [in] channelName channel name + * @param [in] contentType content type + * @return Shared pointer of OpenChannelRequest. nullptr is returned if + * any of the parameters is invalid + */ + static PresenterErrorCode CreateOpenChannelRequest( + proto::OpenChannelRequest& request, const std::string& channel_name, + ContentType content_type); + + /** + * @brief create PresentImageRequest + * @param [out] request request to set the properties + * @param [in] image image + * @return true: success, false: failure + */ + static bool InitPresentImageRequest(proto::PresentImageRequest& request, + const ImageFrame& image); + + /** + * @brief Check OpenChannelResponse + * @param [in] msg Open Channel Response + * @return PresenterErrorCode + */ + static PresenterErrorCode CheckOpenChannelResponse( + const ::google::protobuf::Message& msg); + + /** + * @brief Check PresentImageResponse + * @param [in] msg Present Image Response + * @return PresenterErrorCode + */ + static PresenterErrorCode CheckPresentImageResponse( + const ::google::protobuf::Message& msg); + + private: + /** + * @brief Translate OpenChannelErrorCode + * @param [in] errorCode Error code + * @return PresenterErrorCode + */ + static PresenterErrorCode TranslateErrorCode( + proto::OpenChannelErrorCode 
error_code); + + /** + * @brief Translate PresentDataErrorCode + * @param [in] errorCode Error code + * @return PresenterErrorCode + */ + static PresenterErrorCode TranslateErrorCode( + proto::PresentDataErrorCode error_code); +}; + +} /* namespace presenter */ +} /* namespace ascend */ + +#endif /* SRC_CHANNEL_PRESENTERMESSAGEHELPER_H_ */ diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/byte_buffer.cpp b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/byte_buffer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..3965a3fd4481b409f70e61ac32a66d60ce19b315 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/byte_buffer.cpp @@ -0,0 +1,209 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * ============================================================================ + */ + +#include "ascenddk/presenter/agent/util/byte_buffer.h" + +#include + +#include "ascenddk/presenter/agent/util/logging.h" +#include "ascenddk/presenter/agent/util/mem_utils.h" + +using namespace google::protobuf; +using namespace google::protobuf::io; + +using std::string; + +namespace ascend { +namespace presenter { + +SharedByteBuffer SharedByteBuffer::Make(std::uint32_t size) { + char *buffer = memutils::NewArray(size); + if (buffer == nullptr) { + AGENT_LOG_ERROR("buffer new() failed"); + return SharedByteBuffer(); + } + + return SharedByteBuffer(buffer, size); +} + +SharedByteBuffer::SharedByteBuffer() + : buf_(nullptr), + size_(0) { +} + +SharedByteBuffer::SharedByteBuffer(char* buf, std::uint32_t size) + : buf_(buf, std::default_delete()), + size_(size) { +} + +const char* SharedByteBuffer::Get() const { + return buf_.get(); +} + +char* SharedByteBuffer::GetMutable() const { + return buf_.get(); +} + +uint32_t SharedByteBuffer::Size() const { + return size_; +} + +bool SharedByteBuffer::IsEmpty() const { + return size_ == 0; +} + +ByteBuffer::ByteBuffer(const char* buf, uint32_t size) + : buf(buf), + size(size) { +} + +ByteBuffer::ByteBuffer() + : buf(nullptr), + size(0) { +} + +const char* ByteBuffer::Get() const { + return buf; +} + +uint32_t ByteBuffer::Size() const { + return size; +} + +bool ByteBuffer::IsEmpty() const { + return size 
== 0; +} + +ByteBufferWriter::ByteBufferWriter(char* buf, int size) + : buf_(buf), + w_ptr_(buf), + end_(buf + size) { +} + +ByteBufferWriter::~ByteBufferWriter() { +} + +void ByteBufferWriter::PutUInt8(uint8_t value) { + PutBytes(&value, sizeof(value)); +} + +void ByteBufferWriter::PutUInt32(uint32_t value) { + // host byte order to network byte order + uint32_t converted = htonl(value); + PutBytes(&converted, sizeof(converted)); +} + +void ByteBufferWriter::PutString(const string& value) { + PutBytes(value.c_str(), value.size()); +} + +bool ByteBufferWriter::PutMessage(const ::google::protobuf::Message& msg) { + bool result = msg.SerializePartialToArray(w_ptr_, end_ - w_ptr_); + w_ptr_ += msg.ByteSize(); + return result; +} + +void ByteBufferWriter::PutBytes(const void *data, size_t size) { + ptrdiff_t remaining_bytes = end_ - w_ptr_; + // validate buffer state + if (remaining_bytes <= 0) { + return; + } + + if (size <= (size_t)remaining_bytes) { + memcpy(w_ptr_, data, size); + w_ptr_ += size; + return; + } + + AGENT_LOG_ERROR("memcpy error, buffer remains: %d, and requiring: %u", + remaining_bytes, size); + // memcpy failed, any following write will be meaningless, + // So set wPtr after end, to set the buffer to a faulty state + w_ptr_ += remaining_bytes + 1; +} + +ByteBuffer ByteBufferWriter::GetBuffer() { + if (buf_ == nullptr) { + return ByteBuffer(); + } + + if (w_ptr_ > end_) { + AGENT_LOG_ERROR("Buffer overflow"); + return ByteBuffer(); + } + + ptrdiff_t size = w_ptr_ - buf_; + return ByteBuffer(buf_, size); +} + +ByteBufferReader::ByteBufferReader(const char* buf, int size) + : r_ptr_(buf), + end_(buf + size) { +} + +uint8_t ByteBufferReader::ReadUInt8() { + uint8_t value = *((uint8_t*) r_ptr_); + r_ptr_ += sizeof(value); + return value; +} + +uint32_t ByteBufferReader::ReadUInt32() { + // network byte order to host byte order + uint32_t value = ntohl(*((uint32_t*) r_ptr_)); + r_ptr_ += sizeof(value); + return value; +} + +string 
ByteBufferReader::ReadString(int size) { + string value(r_ptr_, size); + r_ptr_ += size; + return std::move(value); +} + +bool ByteBufferReader::ReadMessage(int size, Message &message) { + // parse protobuf message + if (!message.ParseFromArray(r_ptr_, size)) { + return false; + } + + r_ptr_ += size; + return true; +} + +int ByteBufferReader::RemainingBytes() { + return end_ - r_ptr_; +} + +} /* namespace presenter */ +} /* namespace ascend */ diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/byte_buffer.h b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/byte_buffer.h new file mode 100644 index 0000000000000000000000000000000000000000..47ab750b9cd404ce614b5b487d95b157b2f49ee9 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/byte_buffer.h @@ -0,0 +1,258 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * ============================================================================ + */ + +#ifndef ASCENDDK_PRESENTER_AGENT_UTIL_BYTE_BUFFER_H_ +#define ASCENDDK_PRESENTER_AGENT_UTIL_BYTE_BUFFER_H_ + +#include +#include +#include + +#include + +namespace ascend { +namespace presenter { + +class SharedByteBuffer { + public: + static SharedByteBuffer Make(std::uint32_t size); + + /** + * @brief constructor + * @param [in] buf buffer + * @param [in] size size of buffer + */ + SharedByteBuffer(char* buf, std::uint32_t size); + + /** + * @brief constructor + */ + SharedByteBuffer(); + ~SharedByteBuffer() = default; + + /** + * @brief Get buffer + * @return buffer + */ + const char* Get() const; + + /** + * @brief Get buffer + * @return buffer + */ + char* GetMutable() const; + + /** + * @brief Get size of buffer + * @return size of buffer + */ + std::uint32_t Size() const; + + /** + * @brief Check whether the buffer is empty + * @return true: empty, false: non-empty + */ + bool IsEmpty() const; + + private: + std::shared_ptr buf_; + std::uint32_t size_; +}; + +/** + * a helper data structure that holds a byte array and byte array size + * Note: ByteBuffer does NOT own the underlying 
data + */ +class ByteBuffer { + public: + /** + * @brief constructor + * @param [in] buf buffer + * @param [in] size size of buffer + */ + ByteBuffer(const char* buf, std::uint32_t size); + + /** + * @brief constructor + */ + ByteBuffer(); + ~ByteBuffer() = default; + + /** + * @brief Get buffer + * @return buffer + */ + const char* Get() const; + + /** + * @brief Get size of buffer + * @return size of buffer + */ + std::uint32_t Size() const; + + /** + * @brief Check whether the buffer is empty + * @return true: empty, false: non-empty + */ + bool IsEmpty() const; + + private: + const char *buf; + std::uint32_t size; +}; + +/** + * help to write kinds of data to a buffer + * after all write operation is finished, call GetBuffer to get the buffer + */ +class ByteBufferWriter { + public: + /** + * @brief constructor + * @param [out] buf output buffer + * @param [in] size size of buffer + */ + ByteBufferWriter(char* buf, int size); + ~ByteBufferWriter(); + + // Disable copy constructor and assignment operator + ByteBufferWriter(const ByteBufferWriter&) = delete; + ByteBufferWriter& operator=(const ByteBufferWriter&) = delete; + + // Write methods + // caller must make sure that there is sufficient memory is buffer + + /** + * @brief write an uint8 integer to buffer + * @param [in] value value + */ + void PutUInt8(std::uint8_t value); + + /** + * @brief write an uint32 integer to buffer + * @param [in] value value + */ + void PutUInt32(std::uint32_t value); + + /** + * @brief write an String value to buffer, '\0' is excluded + * @param [in] value string + */ + void PutString(const std::string& value); + + /** + * @brief write an protobuf message to buffer + * @param [in] msg protobuf message + * @return true: success; false: serialization failure + */ + bool PutMessage(const ::google::protobuf::Message& msg); + + /** + * @brief Finish writing and wrap the data to ByteBuffer + * @return ByteBuffer + */ + ByteBuffer GetBuffer(); + + private: + /** + * @brief put bytes 
to buffer + * @param [in] data buffer of data + * @param [in] size size of buffer + */ + void PutBytes(const void* data, size_t size); + + char *buf_; + char *w_ptr_; + const char* const end_; +}; + +/** + * help to read kinds of data from a buffer + * ByteBufferReader will NOT take the ownership of the buffer, + * so it is the caller's responsibility to free the memory of the buffer + */ +class ByteBufferReader { + public: + /** + * @brief constructor + * @param [in] buf buffer + * @param [in] size size of buffer + */ + ByteBufferReader(const char* buf, int size); + ~ByteBufferReader() = default; + + // Disable copy constructor and assignment operator + ByteBufferReader(const ByteBufferReader&) = delete; + ByteBufferReader& operator=(ByteBufferReader) = delete; + + /** + * @brief read an uint8 integer from buffer + * @return uint8 integer + */ + std::uint8_t ReadUInt8(); + + /** + * @brief read an uint32 integer from buffer + * @return uint32 integer + */ + std::uint32_t ReadUInt32(); + + /** + * @brief read an string from buffer + * @return string + */ + std::string ReadString(int size); + + /** + * @brief read an protobuf message from buffer + * @param [in] size size of the message + * @param [out] message + * @return true: success, false: parse message failed + */ + bool ReadMessage(int size, ::google::protobuf::Message &message); + + /** + * @brief get the size of buffer available for reading + * @return remaining bytes for reading + */ + int RemainingBytes(); + + private: + const char* r_ptr_; + const char* const end_; +}; + +} /* namespace presenter */ +} /* namespace ascend */ + +#endif /* ASCENDDK_PRESENTER_AGENT_UTIL_BYTE_BUFFER_H_ */ diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/logging.h b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/logging.h new file mode 100644 index 0000000000000000000000000000000000000000..f80e83eee52e68a666a329b781430c822bf26271 --- /dev/null +++ 
b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/logging.h @@ -0,0 +1,57 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#ifndef ASCENDDK_PRESENTER_AGENT_UTIL_LOGGING_H_ +#define ASCENDDK_PRESENTER_AGENT_UTIL_LOGGING_H_ + +#include "acl/acl_base.h" +#include "cerrno" + + +// debug level logging +#define AGENT_LOG_DEBUG(fmt, ...) \ + aclAppLog(ACL_DEBUG, __FUNCTION__, __FILE__, __LINE__, fmt, ##__VA_ARGS__) + +// info level logging +#define AGENT_LOG_INFO(fmt, ...) \ + aclAppLog(ACL_INFO, __FUNCTION__, __FILE__, __LINE__, fmt, ##__VA_ARGS__) + +// warn level logging +#define AGENT_LOG_WARN(fmt, ...) \ + aclAppLog(ACL_WARNING, __FUNCTION__, __FILE__, __LINE__, fmt, ##__VA_ARGS__) + +// error level logging +#define AGENT_LOG_ERROR(fmt, ...) \ + aclAppLog(ACL_ERROR, __FUNCTION__, __FILE__, __LINE__, fmt, ##__VA_ARGS__) + +#endif /* ASCENDDK_PRESENTER_AGENT_UTIL_LOGGING_H_ */ diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/mem_utils.h b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/mem_utils.h new file mode 100644 index 0000000000000000000000000000000000000000..44dfd4dd2a0c885333b66209e9a378f48fc8de01 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/mem_utils.h @@ -0,0 +1,69 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#ifndef ASCENDDK_PRESENTER_AGENT_UTIL_MEM_UTILS_H_ +#define ASCENDDK_PRESENTER_AGENT_UTIL_MEM_UTILS_H_ + +#include +#include + +#include "ascenddk/presenter/agent/util/logging.h" + +namespace ascend { +namespace presenter { + +namespace memutils { + +/** + * @brief util for creating array, no throw + * @param [in] T type of the array + * @param [in] size size of the array + * @return array of type T + */ +template +T* NewArray(size_t size) { + if (size == 0) { + AGENT_LOG_ERROR("New array with size = 0"); + return nullptr; + } + + T* arr = new (std::nothrow) T[size]; + return arr; +} + +} /* namespace memutils */ + +} /* namespace presenter */ +} /* namespace ascend */ + +#endif /* ASCENDDK_PRESENTER_AGENT_UTIL_MEM_UTILS_H_ */ diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/parse_config.cpp b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/parse_config.cpp new file mode 100644 index 0000000000000000000000000000000000000000..cbda100ec74b2132c59b2003148460a2a3f45c9c --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/parse_config.cpp @@ -0,0 +1,97 @@ +#include "parse_config.h" + +#include +#include +using namespace std; + +#define COMMENT_CHAR '#' +#define EQUALS_CHAR '=' +#define BLANK_SPACE_CHAR ' ' +#define TABLE_CHAR '\t' + +bool IsSpace(char c) +{ + return (BLANK_SPACE_CHAR == c || TABLE_CHAR == c); +} + +void Trim(string& str) +{ + if (str.empty()) { + return; + } + uint32_t i, start_pos, end_pos; + for (i = 0; i < str.size(); ++i) { + if (!IsSpace(str[i])) { + break; + } + } + if (i == str.size()) { // is all blank space + str = ""; + return; + } + + start_pos = i; + + for (i = str.size() - 1; i >= 0; --i) { + if (!IsSpace(str[i])) { + break; + } + } + end_pos = i; + + str = str.substr(start_pos, end_pos - start_pos + 1); +} + +bool AnalyseLine(const string & line, string & key, string & 
value) +{ + if (line.empty()) + return false; + int start_pos = 0, end_pos = line.size() - 1, pos; + if ((pos = line.find(COMMENT_CHAR)) != -1) { + if (0 == pos) { //the first charactor is # + return false; + } + end_pos = pos - 1; + } + string new_line = line.substr(start_pos, start_pos + 1 - end_pos); // delete comment + + if ((pos = new_line.find(EQUALS_CHAR)) == -1) + return false; // has no = + + key = new_line.substr(0, pos); + value = new_line.substr(pos + 1, end_pos + 1- (pos + 1)); + + Trim(key); + if (key.empty()) { + return false; + } + Trim(value); + return true; +} + +bool ReadConfig(map& config, const char* configFile) +{ + config.clear(); + ifstream infile(configFile); + if (!infile) { + cout << "file open error" << endl; + return false; + } + string line, key, value; + while (getline(infile, line)) { + if (AnalyseLine(line, key, value)) { + config[key] = value; + } + } + + infile.close(); + return true; +} + +void PrintConfig(const map& config) +{ + map::const_iterator mIter = config.begin(); + for (; mIter != config.end(); ++mIter) { + cout << mIter->first << "=" << mIter->second << endl; + } +} diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/parse_config.h b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/parse_config.h new file mode 100644 index 0000000000000000000000000000000000000000..1a0d4049493cf4ce43d95222a26521ef9c8971d9 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/parse_config.h @@ -0,0 +1,10 @@ +#ifndef _GET_CONFIG_H_ +#define _GET_CONFIG_H_ + +#include +#include +using namespace std; + +bool ReadConfig(map& config, const char* configFile); +void PrintConfig(const map & m); +#endif diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/socket_utils.cpp b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/socket_utils.cpp new file mode 100644 index 
0000000000000000000000000000000000000000..d36a828fa29c0d5cfa41eb5362514f056c1b48c9 --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/socket_utils.cpp @@ -0,0 +1,238 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#include "ascenddk/presenter/agent/util/socket_utils.h" + +#include +#include +#include +#include +#include +#include +#include + +#include "ascenddk/presenter/agent/util/logging.h" + +namespace { + +// no flag is need for now +const int kSocketFlagNone = 0; + +// socket closed +const int kSocketClosed = 0; + +// connect timeout +const int kDefaultTimeoutInSec = 3; + +const int kSocketSuccess = 0; + +// indicating invalid socket file descriptor +const int kSocketFdNull = -1; + +const int kReuseAddress = 1; + +} + +namespace ascend { +namespace presenter { +namespace socketutils { + +// set blocking mode +void SetNonBlocking(int socket, bool nonblocking) { + // get original mask + long mask = fcntl(socket, F_GETFL, NULL); + if (nonblocking) { + mask |= O_NONBLOCK; // set nonblocking + } else { + mask &= ~O_NONBLOCK; // unset nonblocking + } + (void) fcntl(socket, F_SETFL, mask); +} + +bool SetSockAddr(const char *host_ip, uint16_t port, sockaddr_in &addr) { + // valid port is 1~65535 + if (port == 0) { + AGENT_LOG_ERROR("Invalid port: %d", port); + return false; + } + + // convert host and port to sockaddr + memset(&addr, 0, sizeof(addr)); + + addr.sin_family = AF_INET; + addr.sin_port = htons(port); + if (inet_pton(AF_INET, host_ip, &addr.sin_addr) <= 0) { + AGENT_LOG_ERROR("Invalid host IP: %s", host_ip); + return false; + } + + return true; +} + +void SetSocketReuseAddr(int socket) { + // set reuse address + int so_reuse = kReuseAddress; + int ret = setsockopt(socket, SOL_SOCKET, SO_REUSEADDR, &so_reuse, + sizeof(so_reuse)); + if (ret != kSocketSuccess) { + AGENT_LOG_WARN("set socket opt SO_REUSEADDR failed"); + } +} + +void SetSocketTimeout(int socket, int timeout_in_sec) { + // initialize timeout + timeval timeout = { timeout_in_sec, 0 }; + + // set write timeout + int ret = setsockopt(socket, SOL_SOCKET, SO_SNDTIMEO, &timeout, + sizeof(timeout)); + if (ret != 
kSocketSuccess) { + AGENT_LOG_WARN("set socket opt SO_SNDTIMEO failed"); + } + + // set read timeout + ret = setsockopt(socket, SOL_SOCKET, SO_RCVTIMEO, &timeout, sizeof(timeout)); + if (ret != kSocketSuccess) { + AGENT_LOG_WARN("set socket opt SO_RCVTIMEO failed"); + } +} + +int CreateSocket() { + return ::socket(AF_INET, SOCK_STREAM, 0); +} + +int Connect(int socket, const sockaddr_in& addr) { + // Ignore SIGPIPE signals + signal(SIGPIPE, SIG_IGN); + + // set nonblocking and connect with timeout + SetNonBlocking(socket, true); + + // do connect + int ret = ::connect(socket, (sockaddr*) &addr, sizeof(addr)); + if (ret < 0) { + if (errno != EINPROGRESS) { + AGENT_LOG_ERROR("connect() error: %s", strerror(errno)); + return kSocketError; + } + + fd_set fdset; + FD_ZERO(&fdset); + FD_SET(socket, &fdset); + // connect timeout = 3 seconds + timeval tv = { kDefaultTimeoutInSec, 0 }; + int select_ret = select(socket + 1, NULL, &fdset, NULL, &tv); + if (select_ret < 0) { // error + AGENT_LOG_ERROR("select() error: %s", strerror(errno)); + return kSocketError; + } + + if (select_ret == 0) { // no FD is ready + AGENT_LOG_ERROR("select() timeout"); + return kSocketError; + } + + int so_error = kSocketError; + socklen_t len = sizeof(so_error); + getsockopt(socket, SOL_SOCKET, SO_ERROR, &so_error, &len); + if (so_error != kSocketSuccess) { + AGENT_LOG_ERROR("getsockopt() error: %d", so_error); + return kSocketError; + } + } + + // reset to blocking mode + SetNonBlocking(socket, false); + return kSocketSuccess; +} + +int ReadN(int socket, char *buffer, int size) { + int received_cnt = 0; + // keep reading until nReceived == size + while (received_cnt < size) { + char *write_ptr = buffer + received_cnt; + int ret = ::recv(socket, write_ptr, size - received_cnt, kSocketFlagNone); // [false alarm]: will never write over size + if (ret == kSocketError) { + if (errno == EAGAIN || errno == EINTR) { + AGENT_LOG_INFO("recv() timeout. 
error = %s", strerror(errno)); + return kSocketTimeout; + } + + AGENT_LOG_ERROR("recv() error. error = %s", strerror(errno)); + return kSocketError; + } + + if (ret == kSocketClosed) { + AGENT_LOG_ERROR("socket closed"); + return kSocketError; + } + + received_cnt += ret; + } + + return received_cnt; +} + +int WriteN(int socket, const char *data, int size) { + int sent_cnt = 0; + // keep reading until nReceived == size + while (sent_cnt < size) { + const char *read_ptr = data + sent_cnt; + int ret = ::send(socket, read_ptr, size - sent_cnt, kSocketFlagNone); + if (ret == kSocketError) { + AGENT_LOG_ERROR("send() error. errno = %s", strerror(errno)); + return kSocketError; + } + + if (ret == kSocketClosed) { + AGENT_LOG_ERROR("socket closed"); + return kSocketError; + } + + sent_cnt += ret; + } + + return sent_cnt; +} + +void CloseSocket(int &socket) { + if (socket >= 0) { + (void) close(socket); + socket = kSocketFdNull; + } +} + +} /* namespace sockutil */ +} /* namespace presenter */ +} /* namespace ascend */ + diff --git a/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/socket_utils.h b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/socket_utils.h new file mode 100644 index 0000000000000000000000000000000000000000..843841caad49669d5b99392ef9108411e722df4b --- /dev/null +++ b/cplusplus/common/presenteragent/src/ascenddk/presenter/agent/util/socket_utils.h @@ -0,0 +1,117 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. 
+ * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#ifndef ASCENDDK_PRESENTER_AGENT_UTIL_SOCKET_UTILS_H_ +#define ASCENDDK_PRESENTER_AGENT_UTIL_SOCKET_UTILS_H_ + +#include +#include +#include + +namespace ascend { +namespace presenter { + +namespace socketutils { + +// indicating socket error +const int kSocketError = -1; + +// indicating socket timeout +const int kSocketTimeout = -11; + +/** + * @brief SetSockAddr + * @param [in] host_ip host IP + * @param [in] port port + * @param [out] addr address + * @return true: success, false: failure + */ +bool SetSockAddr(const char *host_ip, uint16_t port, sockaddr_in &addr); + +/** + * @brief set reuse address option + * @param [in] socket file descriptor of the socket + */ +void SetSocketReuseAddr(int socket); + +/** + * @brief set read timeout and write timeout to a socket + * @param [in] socket file descriptor of the socket + * @param [in] timeout_in_sec timeout in second + */ +void SetSocketTimeout(int socket, int timeout_in_sec); + +/** + * @brief Create a new socket + * @return a file descriptor for the new socket, or SOCKET_ERROR(-1) for errors + */ +int CreateSocket(); + +/** + * @brief Open a connection on socket FD to peer at ADDR + * @param [in] socket file descriptor of the socket + * @param [in] addr peer address + * @return 0 on success, -1 for errors. + */ +int Connect(int socket, const sockaddr_in &addr); + +/** + * @brief Read N bytes into BUF from socket FD. + * @param [in] socket file descriptor of the socket + * @param [out] buffer buffer to write data to + * @param [in] size size of data to read + * @return the number read or -1 for errors. + */ +int ReadN(int socket, char *buffer, int size); + +/** + * @brief Write N bytes into BUF to socket FD. + * @param [in] socket file descriptor of the socket + * @param [in] data buffer of data to write to socket + * @param [in] size size of data to write + * @return the number wrote or -1 for errors. 
+ */ +int WriteN(int socket, const char *data, int size); + +/** + * @brief close the socket + * @param [in|out] socket file descriptor of the socket + */ +void CloseSocket(int &socket); + +} /* namespace socketutils */ + +} /* namespace presenter */ +} /* namespace ascend */ + +#endif /* ASCENDDK_PRESENTER_AGENT_UTIL_SOCKET_UTILS_H_ */ diff --git a/cplusplus/environment/README_CN.md b/cplusplus/environment/README_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..d85db9d952c9f79a8297a57edbdee3c73edcee66 --- /dev/null +++ b/cplusplus/environment/README_CN.md @@ -0,0 +1,27 @@ +中文|[English](README_EN.md) + +# c++环境准备和依赖安装 + +### 介绍 +运行samples仓中的c++样例之前,请根据本文指导安装第三方依赖并配置好环境。 + + +### 步骤 +请依次执行以下三步,请根据设备形态,点击对应的链接进行安装。 + +1.基础环境配置 +- [for_atlas200dk](./prepare_ENV/README_200DK_CN.md) +- [for_atlas300](./prepare_ENV/README_300_CN.md) + +2.安装ffmpeg和opencv +- [for_atlas200dk](./opencv_install/README_200DK_CN.md) +- [for_atlas300](./opencv_install/README_300_CN.md) + +3.安装atlasutil库 +- [for_atlas200dk](./atlasutil_install/README_200DK_CN.md) +- [for_atlas300](./atlasutil_install/README_300_CN.md) + +4.安装Presenter Agent +- [for_atlas200dk](./presenteragent_install/README_200DK_CN.md) +- [for_atlas300](./presenteragent_install/README_300_CN.md) + diff --git a/cplusplus/environment/README_EN.md b/cplusplus/environment/README_EN.md new file mode 100644 index 0000000000000000000000000000000000000000..1dd53648f4d222c86d9b8342228f065146cd1df0 --- /dev/null +++ b/cplusplus/environment/README_EN.md @@ -0,0 +1,26 @@ +English|[中文](README_CN.md) + +# C++ environment preparation and dependency installation + +### Introduction +Introduction + + +### Step +Please perform the following three steps in sequence, please click the corresponding link to install according to the device type. 
+ +1.Basic environment configuration +- [for_atlas200dk](./prepare_ENV/README_200DK_EN.md) +- [for_atlas300](./prepare_ENV/README_300_EN.md) + +2.Install ffmpeg and opencv +- [for_atlas200dk](./opencv_install/README_200DK_EN.md) +- [for_atlas300](./opencv_install/README_300_EN.md) + +3.Install atlasutil +- [for_atlas200dk](./atlasutil_install/README_200DK_EN.md) +- [for_atlas300](./atlasutil_install/README_300_EN.md) + +4.Install Presenter Agent +- [for_atlas200dk](./presenteragent_install/README_200DK_EN.md) +- [for_atlas300](./presenteragent_install/README_300_EN.md) diff --git a/cplusplus/environment/atlasutil_install/README_200DK_CN.md b/cplusplus/environment/atlasutil_install/README_200DK_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..9b6346a068df2f5bdf855558b0499a10c182917d --- /dev/null +++ b/cplusplus/environment/atlasutil_install/README_200DK_CN.md @@ -0,0 +1,44 @@ +中文|[English](README_300_EN.md) +# atlasutil库使用说明 +1. atlasutil库对当前开源社区样例中 + + - Atlas200DK板载摄像头 + + - acl dvpp图像和视频处理 + + - acl模型推理等进行封装 + + 等重复代码进行封装,提供一组公共接口。 + + +2. 本库仅供当前社区开源样例使用,不覆盖ascend平台应用开发的所有场景,不作为用户应用开发的标准库;仅支持Atlas200DK和Atlas300样例。 + +# 部署方法 + +$\color{red}{以下命令在开发环境上用安装开发套件包的用户执行}$ +1. 下载源码 + **cd $HOME** + **git clone https://gitee.com/ascend/samples.git** + +2. 设置环境变量,在命令行内执行 + + export DDK_PATH=\$HOME/Ascend/ascend-toolkit/latest/**_ARCH_** + >![输入图片说明](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "屏幕截图.png") **说明:** + >- 请将\$HOME/Ascend/ascend-toolkit/latest替换为ACLlib安装包的实际安装路径。 + >- 若版本为20.0,请将 **ARCH** 替换为arm64-linux_gcc7.3.0;若版本为20.1,请将 **ARCH** 替换为arm64-linux。 + +3. 编译并安装atlasutil + **cd $HOME/samples/cplusplus/common/atlasutil/** + **make** + **make install** + >![输入图片说明](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "屏幕截图.png") **说明:** + > **生成的libatalsutil.so在\\$HOME/ascend_ddk/arm/lib/下;头文件在\\$HOME/ascend_ddk/arm/include/atlasutil下。** + +4. 
将编译好的so传到运行环境 (如开发环境和运行环境安装在同一服务器,请忽略此步) + **scp \$HOME/ascend_ddk/arm/lib/libatlasutil.so HwHiAiUser@192.168.1.2:/home/HwHiAiUser/ascend_ddk/arm/lib/** + + + + + + diff --git a/cplusplus/environment/atlasutil_install/README_300_CN.md b/cplusplus/environment/atlasutil_install/README_300_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..af46356c9ba4be60d48c44f05cd2ad6dd83a5114 --- /dev/null +++ b/cplusplus/environment/atlasutil_install/README_300_CN.md @@ -0,0 +1,46 @@ +中文|[English](README_300_EN.md) +# atlasutil库使用说明 +1. atlasutil库对当前开源社区样例中 + + - Atlas200DK板载摄像头 + + - acl dvpp图像和视频处理 + + - acl模型推理等进行封装 + + 等重复代码进行封装,提供一组公共接口。 + + +2. 本库仅供当前社区开源样例使用,不覆盖ascend平台应用开发的所有场景,不作为用户应用开发的标准库;仅支持Atlas200DK和Atlas300样例。 + +# 部署方法 + +$\color{red}{以下命令在开发环境上用安装开发套件包的用户执行}$ +1. 下载源码 + **cd $HOME** + **git clone https://gitee.com/ascend/samples.git** + +2. 设置环境变量 + export DDK_PATH=$HOME/Ascend/ascend-toolkit/latest/**_ARCH_** + >![输入图片说明](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "屏幕截图.png") **说明:** + 请将$HOME/Ascend/ascend-toolkit/latest替换为ACLlib安装包的实际安装路径。 + 若版本为20.0,请将 **ARCH** 替换为x86_64-linux_gcc7.3.0;若版本为20.1,请将 **ARCH** 替换为x86_64-linux。 + +3. 编译并安装atlasutil + **cd $HOME/samples/cplusplus/common/atlasutil/** + **make mode=ASIC** + **make mode=ASIC install** + >![输入图片说明](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "屏幕截图.png") **说明:** + > **生成的libatalsutil.so在\\$HOME/ascend_ddk/x86/lib/下;头文件在\\$HOME/ascend_ddk/x86/include/atlasutil下。** + +4. 
将编译好的so传到运行环境 (如开发环境和运行环境安装在同一服务器,请忽略此步) + **scp \$HOME/ascend_ddk/x86/lib/libatlasutil.so HwHiAiUser@_IP_:/home/HwHiAiUser/ascend_ddk/x86/lib/** + + >![输入图片说明](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "屏幕截图.png") **说明:** + 请将IP替换为ai1s云端推理环境的公网ip地址,HwHiAiUser替换为实际的运行用户。 + + + + + + diff --git a/cplusplus/environment/opencv_install/README_200DK_CN.md b/cplusplus/environment/opencv_install/README_200DK_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..748a29bace511ced1674865fe3ba16bdf0ceddcc --- /dev/null +++ b/cplusplus/environment/opencv_install/README_200DK_CN.md @@ -0,0 +1,78 @@ +中文|[English](README_200DK_EN.md) + +# 安装ffmpeg+opencv + +安装ffmpeg和opencv的原因是适配多样性的数据预处理和后处理,昇腾社区的部分样例也是基于ffmpeg和opencv做的处理。 + +$\color{red}{以下操作在运行环境(Atlas200DK)上操作}$ + + +1. 安装相关依赖 + **sudo apt-get install build-essential libgtk2.0-dev libavcodec-dev libavformat-dev libjpeg-dev libtiff5-dev git cmake libswscale-dev pkg-config -y** + + >![输入图片说明](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "屏幕截图.png") **说明:** + > **若apt-get安装依赖出现类似报错(dpkg: error processing package *** (--configure)) ,请参考[FAQ](https://bbs.huaweicloud.com/forum/thread-74123-1-1.html)来解决。** + + +2. 
安装ffmpeg + + 创建文件夹,用于存放编译后的文件 + **mkdir -p /home/HwHiAiUser/ascend_ddk/arm** + + 下载ffmpeg + **cd $HOME** + **wget http://www.ffmpeg.org/releases/ffmpeg-4.1.3.tar.gz --no-check-certificate** + **tar -zxvf ffmpeg-4.1.3.tar.gz** + **cd ffmpeg-4.1.3** + + 安装ffmpeg + **./configure --enable-shared --enable-pic --enable-static --disable-x86asm --prefix=/home/HwHiAiUser/ascend_ddk/arm** + **make -j8** + **make install** + + 将ffmpeg添加到系统环境变量中,使得其他程序能够找到ffmpeg环境 + **su root** + **vim /etc/ld.so.conf.d/ffmpeg.conf** + 在末尾添加一行 + **/home/HwHiAiUser/ascend_ddk/arm/lib** + 使配置生效 + **ldconfig** + + 配置profile系统文件 + **vim /etc/profile** + 在末尾添加一行 + **export PATH=$PATH:/home/HwHiAiUser/ascend_ddk/arm/bin** + 使配置文件生效 + **source /etc/profile** + 使opencv能找到ffmpeg + **cp /home/HwHiAiUser/ascend_ddk/arm/lib/pkgconfig/\* /usr/share/pkgconfig** + 退出root用户 + **exit** + +3. 安装opencv + 下载opencv + **cd $HOME** + **git clone -b 4.3.0 https://gitee.com/mirrors/opencv.git** + **git clone -b 4.3.0 https://gitee.com/mirrors/opencv_contrib.git** + **cd opencv** + **mkdir build** + **cd build** + + 编译并安装opencv + ``` + cmake -D BUILD_SHARED_LIBS=ON -D BUILD_TESTS=OFF -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/home/HwHiAiUser/ascend_ddk/arm -D WITH_LIBV4L=ON -D OPENCV_EXTRA_MODULES=../../opencv_contrib/modules .. + ``` + + **make -j8** + **make install** + + +4. 
将开发板上安装的ffmpeg和opencv库导入开发环境中,以提供编译使用。 (如开发环境与运行环境都在Atlas200DK上,请忽略此步) + + $\color{red}{以下操作在开发环境执行}$ + 使用普通用户执行 + **mkdir $HOME/ascend_ddk** + **scp -r HwHiAiUser@192.168.1.2:/home/HwHiAiUser/ascend_ddk/arm $HOME/ascend_ddk** + **sudo cd /usr/lib/aarch64-linux-gnu** + **sudo scp -r HwHiAiUser@192.168.1.2:/lib/aarch64-linux-gnu/\* ./** + **sudo scp -r HwHiAiUser@192.168.1.2:/usr/lib/aarch64-linux-gnu/\* ./** \ No newline at end of file diff --git a/cplusplus/environment/opencv_install/README_200DK_EN.md b/cplusplus/environment/opencv_install/README_200DK_EN.md new file mode 100644 index 0000000000000000000000000000000000000000..286cfc57f4287475d8b642f49967838eae2daee9 --- /dev/null +++ b/cplusplus/environment/opencv_install/README_200DK_EN.md @@ -0,0 +1,77 @@ +English|[中文](README_200DK_CN.md) + +# Installing FFmpeg and OpenCV + +FFmpeg and OpenCV are installed to implement diversified data preprocessings and postprocessings. Most of the samples provided by the Ascend Developer Zone utilize the data processing capabilities backed by FFmpeg and OpenCV. + + **Perform the following operations in the operating environment (Atlas 200 DK).** + + +1. Install dependencies. + **sudo apt-get install build-essential libgtk2.0-dev libavcodec-dev libavformat-dev libjpeg-dev libtiff5-dev git cmake libswscale-dev pkg-config -y** + + >![](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "screenshot.png") **NOTE** + > **If an error similar to "dpkg: error processing package *** (--configure)" is displayed during the apt-get installation, rectify the fault by referring to [FAQ](https://bbs.huaweicloud.com/forum/thread-74123-1-1.html).** + + +2. Install FFmpeg. + + Create a folder for storing build output files. + **mkdir -p /home/HwHiAiUser/ascend_ddk/arm** + + Download FFmpeg. + **cd $HOME** + **wget http://www.ffmpeg.org/releases/ffmpeg-4.1.3.tar.gz --no-check-certificate** + **tar -zxvf ffmpeg-4.1.3.tar.gz** + **cd ffmpeg-4.1.3** + + Install FFmpeg. 
+ **./configure --enable-shared --enable-pic --enable-static --disable-x86asm --prefix=/home/HwHiAiUser/ascend_ddk/arm** + **make -j8** + **make install** + + Add FFmpeg to the path of the system using environment variables so that other programs can find the FFmpeg program. + **su root** + **vim /etc/ld.so.conf.d/ffmpeg.conf** + Append the following line to the file. + **/home/HwHiAiUser/ascend_ddk/arm/lib** + Make the configuration take effect. + **ldconfig** + + Configure the profile system file. + **vim /etc/profile** + Append the following line to the file. + **export PATH=$PATH:/home/HwHiAiUser/ascend_ddk/arm/bin** + Make the configuration file take effect. + **source /etc/profile** + Make OpenCV find FFmpeg. + **cp /home/HwHiAiUser/ascend_ddk/arm/lib/pkgconfig/\* /usr/share/pkgconfig** + Exit the **root** user. + **exit** + +3. Install OpenCV. + Download OpenCV. + **cd $HOME** + **git clone -b 4.3.0 https://gitee.com/mirrors/opencv.git** + **git clone -b 4.3.0 https://gitee.com/mirrors/opencv_contrib.git** + **cd opencv** + **mkdir build** + **cd build** + + Build and install OpenCV. + ``` + cmake -D BUILD_SHARED_LIBS=ON -D BUILD_TESTS=OFF -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/home/HwHiAiUser/ascend_ddk/arm -D WITH_LIBV4L=ON -D OPENCV_EXTRA_MODULES=../../opencv_contrib/modules .. + ``` + + **make -j8** + **make install** + +4. Import the FFmpeg and OpenCV libraries installed on the Atlas 200 DK to the development environment for building. (Skip this step if both the development environment and operating environment are set up on the Atlas 200 DK.) 
+ + **Perform the following operations in the development environment.** + Run the following commands as a common user: + **mkdir $HOME/ascend_ddk** + **scp -r HwHiAiUser@192.168.1.2:/home/HwHiAiUser/ascend_ddk/arm \$HOME/ascend_ddk** + **sudo cd /usr/lib/aarch64-linux-gnu** + **sudo scp -r HwHiAiUser@192.168.1.2:/lib/aarch64-linux-gnu/\* ./** + **sudo scp -r HwHiAiUser@192.168.1.2:/usr/lib/aarch64-linux-gnu/\* ./** \ No newline at end of file diff --git a/cplusplus/environment/opencv_install/README_300_CN.md b/cplusplus/environment/opencv_install/README_300_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..f7e6a4ba967899c7b45756ace13aca7a8575e717 --- /dev/null +++ b/cplusplus/environment/opencv_install/README_300_CN.md @@ -0,0 +1,84 @@ +中文|[English](README_300_EN.md) + +# 安装ffmpeg+opencv + +安装ffmpeg和opencv的原因是适配多样性的数据预处理和后处理,昇腾社区的部分样例也是基于ffmpeg和opencv做的处理。 + +$\color{red}{以下命令在开发环境上执行,以普通用户为HwHiAiUser为例,请根据实际情况进行修改。}$ + + +1. 安装相关依赖 + **sudo apt-get install build-essential libgtk2.0-dev libavcodec-dev libavformat-dev libjpeg-dev libtiff5-dev git cmake libswscale-dev pkg-config -y** + + >![输入图片说明](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "屏幕截图.png") **说明:** + > **若apt-get安装依赖出现类似报错(dpkg: error processing package *** (--configure)) ,请参考[FAQ](https://bbs.huaweicloud.com/forum/thread-74123-1-1.html)来解决。** + +2. 安装ffmpeg + 1. 创建文件夹,用于存放编译后的文件 + **mkdir -p $HOME/ascend_ddk/x86** + + 2. 下载ffmpeg + **cd $HOME** + **wget http://www.ffmpeg.org/releases/ffmpeg-4.1.3.tar.gz --no-check-certificate** + **tar -zxvf ffmpeg-4.1.3.tar.gz** + **cd ffmpeg-4.1.3** + + 3. 安装ffmpeg + **./configure --enable-shared --enable-pic --enable-static --disable-x86asm --prefix=\$HOME/ascend_ddk/x86** + **make -j8** + **make install** + + 4. 将ffmpeg添加到系统环境变量中,使得其他程序能够找到ffmpeg环境 + 切换为root用户 + **su root** + 打开conf配置文件 + **vim /etc/ld.so.conf.d/ffmpeg.conf** + 在末尾添加一行。 + **/home/HwHiAiUser/ascend_ddk/x86/lib** + 使配置生效 + **ldconfig** + + 5. 
配置profile系统文件 + **vim /etc/profile** + 在末尾添加一行 + **export PATH=$PATH:/home/HwHiAiUser/ascend_ddk/x86/bin** + 使配置文件生效 + **source /etc/profile** + + 6. 使opencv能找到ffmpeg。 + **cp /home/HwHiAiUser/ascend_ddk/x86/lib/pkgconfig/\* /usr/share/pkgconfig** + 切换回普通用户 + **exit** + >![输入图片说明](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "屏幕截图.png") **说明: + 4、5、6三步中的HwHiAiUser请根据实际情况替换。** + +3. 安装opencv + 1. 下载opencv + **cd \$HOME** + **git clone -b 4.3.0 https://gitee.com/mirrors/opencv.git** + **git clone -b 4.3.0 https://gitee.com/mirrors/opencv_contrib.git** + **cd opencv** + **mkdir build** + **cd build** + + 2. 安装opencv + ``` + cmake -D BUILD_SHARED_LIBS=ON -D BUILD_TESTS=OFF -D CMAKE_BUILD_TYPE=RELEASE -D WITH_LIBV4L=ON -D OPENCV_EXTRA_MODULES=../../opencv_contrib/modules -D CMAKE_INSTALL_PREFIX=$HOME/ascend_ddk/x86 .. + ``` + **make -j8** + **make install** + + +4. 将开发环境安装的ffmpeg、opencv库导入运行环境中,以提供运行使用 **(如开发环境和运行环境在同一服务器上,请忽略此步)** + + $\color{red}{注意:以下操作在运行环境执行}$ + + 普通用户登录运行环境 + + **mkdir \$HOME/ascend_ddk** + **scp -r HwHiAiUser@X.X.X.X:/home/HwHiAiUser/ascend_ddk/x86 \$HOME/ascend_ddk** + **scp -r HwHiAiUser@X.X.X.X:/usr/lib/x86_64-linux-gnu/lib\* \$HOME/ascend_ddk/x86/lib** +

+ + >![输入图片说明](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "屏幕截图.png") **说明: + X.X.X.X请替换为开发环境的ip,HwHiAiUser请根据实际情况替换。** \ No newline at end of file diff --git a/cplusplus/environment/opencv_install/README_300_EN.md b/cplusplus/environment/opencv_install/README_300_EN.md new file mode 100644 index 0000000000000000000000000000000000000000..7cc0ac2f050226546af480b128ba52e7ea1ba5f4 --- /dev/null +++ b/cplusplus/environment/opencv_install/README_300_EN.md @@ -0,0 +1,86 @@ +English|[中文](README_300_CN.md) + +# Installing FFmpeg and OpenCV + +FFmpeg and OpenCV are installed to implement diversified data preprocessings and postprocessings. Most of the samples provided by the Ascend Developer Zone utilize the data processing capabilities backed by FFmpeg and OpenCV. + + **Run the following commands in the development environment as a common user. The following takes **HwHiAiUser**as an example. Replace it with the actual running user.** + + +1. Install dependencies. + **sudo apt-get install build-essential libgtk2.0-dev libavcodec-dev libavformat-dev libjpeg-dev libtiff5-dev git cmake libswscale-dev pkg-config -y** + + >![](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "screenshot.png") **NOTE** + > **If an error similar to "dpkg: error processing package *** (--configure)" is displayed during the apt-get installation, rectify the fault by referring to [FAQ](https://bbs.huaweicloud.com/forum/thread-74123-1-1.html).** + +2. Install FFmpeg. + 1. Create a folder for storing build output files. + **mkdir -p $HOME/ascend_ddk/x86** + + 2. Download FFmpeg. + **cd $HOME** + **wget http://www.ffmpeg.org/releases/ffmpeg-4.1.3.tar.gz --no-check-certificate** + **tar -zxvf ffmpeg-4.1.3.tar.gz** + **cd ffmpeg-4.1.3** + + 3. Install FFmpeg. + **./configure --enable-shared --enable-pic --enable-static --disable-x86asm --prefix=$HOME/ascend_ddk/x86** + **make -j8** + **make install** + + 4. 
Add FFmpeg to the path of the system using environment variables so that other programs can find the FFmpeg program. + Switch to the **root** user. + **su root** + Open the CONF configuration file. + **vim /etc/ld.so.conf.d/ffmpeg.conf** + Append the following line to the file. + **/home/HwHiAiUser/ascend_ddk/x86/lib** + Make the configuration take effect. + **ldconfig** + + 5. Configure the profile system file. + **vim /etc/profile** + Append the following line to the file. + **export PATH=$PATH:/home/HwHiAiUser/ascend_ddk/x86/bin** + Make the configuration file take effect. + **source /etc/profile** + + 6. Make OpenCV find FFmpeg. + **cp /home/HwHiAiUser/ascend_ddk/x86/lib/pkgconfig/\* /usr/share/pkgconfig** + Switch to the common user. + **exit** + >![](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "screenshot.png") **NOTE** + >Replace **HwHiAiUser** in steps 4, 5, and 6 based on the site requirements.** + +3. Install OpenCV. + 1. Download OpenCV. + **cd $HOME** + **git clone -b 4.3.0 https://gitee.com/mirrors/opencv.git** + **git clone -b 4.3.0 https://gitee.com/mirrors/opencv_contrib.git** + **cd opencv** + **mkdir build** + **cd build** + + 2. Install OpenCV. + ``` + cmake -D BUILD_SHARED_LIBS=ON -D BUILD_TESTS=OFF -D CMAKE_BUILD_TYPE=RELEASE -D WITH_LIBV4L=ON -D OPENCV_EXTRA_MODULES=../../opencv_contrib/modules -D CMAKE_INSTALL_PREFIX=$HOME/ascend_ddk/x86 .. + ``` + + **make -j8** + **make install** + + +4. Import the FFmpeg and OpenCV libraries installed in the development environment to the operating environment for execution. **(Skip this step if both the development environment and operating environment are on the same server.)** + + **Note: Perform the following operation in the operating environment.** + + Log in to the operating environment as a common user. 
+ + **mkdir $HOME/ascend_ddk** + **scp -r HwHiAiUser@X.X.X.X:/home/HwHiAiUser/ascend_ddk/x86 $HOME/ascend_ddk** + **scp -r HwHiAiUser@X.X.X.X:/usr/lib/x86_64-linux-gnu/lib\* $HOME/ascend_ddk/x86/lib** + + + >![](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "screenshot.png") **NOTE: Replace *X.X.X.X* with the IP address of the development environment and HwHiAiUser with the actual running user.** + + diff --git a/cplusplus/environment/prepare_ENV/README_200DK_CN.md b/cplusplus/environment/prepare_ENV/README_200DK_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..b07141b8c4aa226dcb3d5903454228533a002d50 --- /dev/null +++ b/cplusplus/environment/prepare_ENV/README_200DK_CN.md @@ -0,0 +1,197 @@ +中文|[English](README_200DK_EN.md) + +# 基础环境配置 +本文的目的是进行基础环境配置,包含sudo权限配置、apt源配置、开发者板联网、环境变量配置。如已配置,均可跳过。 + +$\color{red}{以下操作在开发环境上操作,以普通用户为HwHiAiUser为例,请根据实际情况进行修改。}$ + + +1. 给HwHiAiUser用户配置sudo权限 + + + 切换为root用户 + **su root** + + 给sudoer文件配置写权限,并打开该文件 + **chmod u+w /etc/sudoers** + **vi /etc/sudoers** + + 在该文件 `# User privilege specification` 下面增加如下内容: + **HwHiAiUser ALL=(ALL:ALL) ALL** + + ![输入图片说明](https://images.gitee.com/uploads/images/2020/1128/144046_7c02d0d0_7401379.png "屏幕截图.png") + + 完成后,执行以下命令取消`/etc/sudoers`文件的写权限 + **chmod u-w /etc/sudoers** + 切换回普通用户 + **exit** + +2. 
apt源配置 + + 配置ubuntu18.04-x86的apt清华源 + + **sudo vi /etc/apt/sources.list** + + 将源文件内容替换为以下ubuntu18.04-x86的apt清华源 + + ``` + # 默认注释了源码镜像以提高 apt update 速度,如有需要可自行取消注释 + deb https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic main restricted universe multiverse + # deb-src https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic main restricted universe multiverse + deb https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-updates main restricted universe multiverse + # deb-src https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-updates main restricted universe multiverse + deb https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-backports main restricted universe multiverse + # deb-src https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-backports main restricted universe multiverse + deb https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-security main restricted universe multiverse + # deb-src https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-security main restricted universe multiverse + ``` + 执行以下命令更新源 + **sudo apt-get update** +

+ >![输入图片说明](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "屏幕截图.png") **说明:** + > **如果sudo apt-get update失败,可以试用其他的国内源 https://www.cnblogs.com/dream4567/p/9690850.html** + + +3. 在开发环境安装编译工具 + + **sudo apt-get install -y g++\-aarch64-linux-gnu g++\-5-aarch64-linux-gnu** + +4. 在开发环境中添加以下环境变量,用于atc模型转换 + + 1. 打开.bashrc文件 + **vim ~/.bashrc** + + 在文件中添加以下环境变量 + **export install_path=\\$HOME/Ascend/ascend-toolkit/latest** + + **export PATH=/usr/local/python3.7.5/bin:\\${install_path}/atc/ccec_compiler/bin:\\${install_path}/atc/bin:\\$PATH** + + **export ASCEND_OPP_PATH=\\${install_path}/opp** + + **export LD_LIBRARY_PATH=\\${install_path}/atc/lib64** + + 其中,PYTHONPATH环境变量在20.0和20.1版本不兼容。请按照CANN版本选择对应方式执行命令添加PYTHONPATH环境变量。 + + - 20.0版本 + + **export PYTHONPATH=\\${install_path}/atc/python/site-packages/te:\\${install_path}/atc/python/site-packages/topi:\\$PYTHONPATH** + + - 20.1版本 + + **export PYTHONPATH=\\${install_path}/atc/python/site-packages:\\${install_path}/atc/python/site-packages/auto_tune.egg/auto_tune:\\${install_path}/atc/python/site-packages/schedule_search.egg:$PYTHONPATH** + + 保存退出 + **wq!** + >![输入图片说明](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "屏幕截图.png") **说明:** + >**- install_path 请根据实际情况修改。** + >**- 若开发环境与运行环境部署在一台服务器上时,请勿配置LD_LIBRARY_PATH,在运行样例时,会跟运行环境的LD_LIBRARY_PATH有冲突。** + + 2. 执行如下命令使环境变量生效 + **source ~/.bashrc** + +5. 在开发环境部署Media模块 + 1. 将[A200dk-npu-driver-{software version}-ubuntu18.04-aarch64-minirc.tar.gz](https://www.huaweicloud.com/ascend/resource/Software)以开发环境安装用户上传到\$HOME/Ascend目录下。 + 2. 解压driver包 + **cd \$HOME/Ascend** + **tar zxvf A200dk-npu-driver-{software version}-ubuntu18.04-aarch64-minirc.tar.gz** + + +$\color{red}{以下操作在运行环境(Atlas200DK)上操作}$ +1. 登录运行环境 + ssh HwHiAiUser@X.X.X.X + +2. 
给HwHiAiUser用户配置sudo权限 + + + 切换为root用户 (root用户默认密码:Mind@123) + **su root** + + 给sudoer文件配置写权限,并打开该文件 + **chmod u+w /etc/sudoers** + **vi /etc/sudoers** + + 在该文件 `# User privilege specification` 下面增加如下内容: + **HwHiAiUser ALL=(ALL:ALL) ALL** + + ![输入图片说明](https://images.gitee.com/uploads/images/2020/1128/121157_37d3b82d_7401379.png "屏幕截图.png") + 完成后,执行以下命令取消`/etc/sudoers`文件的写权限,并切换回普通用户 + **chmod u-w /etc/sudoers** + **exit** + +3. 开发者板设置联网 + + **sudo vi /etc/netplan/01-netcfg.yaml** + 填写以下配置 + **注:需要注意这里的缩进格式,netplan配置时和python类似,对缩进有强限制** + + ``` + network: + version: 2 + # renderer: NetworkManager + renderer: networkd + ethernets: + eth0: + dhcp4: yes + + usb0: + dhcp4: no + addresses: [192.168.1.2/24] + gateway4: 192.168.0.1 + ``` + + + 将开发板网口接上可正常联网的网线,执行以下命令使配置生效 + **sudo netplan apply** + +4. 开发者板apt换源配置 + + **以下给出两种源,选择其中一种使用,如更新源失败,请自行更换可用Ubuntu 18.04 arm源** + - ubuntu18.04-arm华为源 + + 执行以下换源操作 + **sudo wget -O /etc/apt/sources.list https://repo.huaweicloud.com/repository/conf/Ubuntu-Ports-bionic.list --no-check-certificate** + + 更新源 + **sudo apt-get update** + + - ubuntu18.04-arm官方源 + + 修改源文件 + **sudo vi /etc/apt/sources.list** + + 将源文件内容替换为以下ubuntu-arm官方源。 + ``` + deb http://ports.ubuntu.com/ bionic main restricted universe multiverse + deb-src http://ports.ubuntu.com/ bionic main restricted universe multiverse + deb http://ports.ubuntu.com/ bionic-updates main restricted universe multiverse + deb-src http://ports.ubuntu.com/ bionic-updates main restricted universe multiverse + deb http://ports.ubuntu.com/ bionic-security main restricted universe multiverse + deb-src http://ports.ubuntu.com/ bionic-security main restricted universe multiverse + deb http://ports.ubuntu.com/ bionic-backports main restricted universe multiverse + deb-src http://ports.ubuntu.com/ bionic-backports main restricted universe multiverse + deb http://ports.ubuntu.com/ubuntu-ports/ bionic main universe restricted + deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main universe 
restricted + ``` + + + 更新源。 + **sudo apt-get update** + +5. 在运行环境添加环境变量,用于运行工程。 + 1. 打开.bashrc文件 + **vim ~/.bashrc** + + 在文件中添加以下环境变量 + **export LD_LIBRARY_PATH=/home/HwHiAiUser/ascend_ddk/arm/lib:/home/HwHiAiUser/Ascend/acllib/lib64:$LD_LIBRARY_PATH** + + **export PYTHONPATH=/home/HwHiAiUser/Ascend/pyACL/python/site-packages/acl:$PYTHONPATH** + + 保存退出 + **wq!** + + + 2. 执行如下命令使环境变量生效。 + **source ~/.bashrc** + + + \ No newline at end of file diff --git a/cplusplus/environment/prepare_ENV/README_200DK_EN.md b/cplusplus/environment/prepare_ENV/README_200DK_EN.md new file mode 100644 index 0000000000000000000000000000000000000000..d0ba7d31faad6c2ad33cd471f6497e4e9246d8c1 --- /dev/null +++ b/cplusplus/environment/prepare_ENV/README_200DK_EN.md @@ -0,0 +1,190 @@ +English|[中文](README_200DK_CN.md) +# Basic Environment Configuration +This readme file describes how to configure the basic environment, including the sudo permission, apt source, Atlas 200 DK networking, and environment variables. If they have been configured, skip this workflow. + + **Run the following commands as a common user in the development environment. The following takes **HwHiAiUser** as an example. Replace it with the actual running user.** + + +1. Grant the sudo permission to the **HwHiAiUser** user. + + + Switch to the **root** user. + **su root** + + Grant the write permission on the **sudoers** file and open the file. + **chmod u+w /etc/sudoers** + **vi /etc/sudoers** + + Add the following content below **`# User privilege specification`** in the **sudoers** file. + **HwHiAiUser ALL=(ALL:ALL) ALL** + + ![](https://images.gitee.com/uploads/images/2020/1128/144046_7c02d0d0_7401379.png "Screenshot.png") + + Remove the write permission on the **/etc/sudoers** file. + **chmod u-w /etc/sudoers** + Switch to the common user. + **exit** + +2. Configure the apt sources. + + Configure the Tsinghua apt sources of Ubuntu 18.04 (x86). 
+ + **sudo vi /etc/apt/sources.list** + + Replace the source file content with the following Tsinghua apt sources. + + ``` + # By default, the source code images are commented out to speed up the apt update. You can uncomment them as required. + deb https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic main restricted universe multiverse + # deb-src https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic main restricted universe multiverse + deb https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-updates main restricted universe multiverse + # deb-src https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-updates main restricted universe multiverse + deb https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-backports main restricted universe multiverse + # deb-src https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-backports main restricted universe multiverse + deb https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-security main restricted universe multiverse + # deb-src https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-security main restricted universe multiverse + ``` + Update the sources. + **sudo apt-get update** + >![](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "screenshot.png") **NOTE** + > **If the source fails to be updated, you can try a new source available at https://www.cnblogs.com/dream4567/p/9690850.html**. + + +3. Install a compiler in the development environment. + + **sudo apt-get install -y g++\-aarch64-linux-gnu g++\-5-aarch64-linux-gnu** + +4. Add the following environment variables to the development environment for ATC model conversion. + + 1. Open the **.bashrc** file. + **vim ~/.bashrc** + + Add the following environment variables to the file. 
+ **export install_path=\$HOME/Ascend/ascend-toolkit/latest** + + **export PATH=/usr/local/python3.7.5/bin:\\${install_path}/atc/ccec_compiler/bin:\\${install_path}/atc/bin:\\$PATH** + + **export ASCEND_OPP_PATH=\${install_path}/opp** + + **export LD_LIBRARY_PATH=\${install_path}/atc/lib64** + + The environment variable **PYTHONPATH** is incompatible with CANN 20.0 and 20.1. Run the corresponding command to add **PYTHONPATH** based on the CANN version. + + - For CANN 20.0 + + **export PYTHONPATH=\\${install_path}/atc/python/site-packages/te:\\${install_path}/atc/python/site-packages/topi:\\$PYTHONPATH** + + - For CANN 20.1 + + **export PYTHONPATH=\\${install_path}/atc/python/site-packages:\\${install_path}/atc/python/site-packages/auto_tune.egg/auto_tune:\\${install_path}/atc/python/site-packages/schedule_search.egg:\\$PYTHONPATH** + + Save the configuration and exit. + **wq!** + >![](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "screenshot.png") **NOTE** + >**- Replace install_path with the actual installation path.** + >**- If the development environment and operating environment are set up on the same server, do not configure LD_LIBRARY_PATH, avoiding the conflict with LD_LIBRARY_PATH in the operating environment when running the sample.** + + 2. Make the configuration take effect. + **source ~/.bashrc** + +5. Deploying the Media Module. + 1. Upload [A200dk-npu-driver-{software version}-ubuntu18.04-aarch64-minirc.tar.gz](https://ascend.huawei.com/en/#/hardware/firmware-drivers) to the \$HOME/Ascend directory. + 2. Go to the directory where the Driver package is stored and run the following command as the installation user to extract the Driver package: + **cd \$HOME/Ascend** + **tar zxvf A200dk-npu-driver-{software version}-ubuntu18.04-aarch64-minirc.tar.gz** + + + **Perform the following operations in the operating environment (Atlas 200 DK).** +1. Log in to the operating environment. + ssh HwHiAiUser@X.X.X.X + +2. 
Grant the sudo permission to the **HwHiAiUser** user. + + + Switch to the **root** user. The default password of the **root** user is Mind@123. + **su root** + + Grant the write permission on the **sudoers** file and open the file. + **chmod u+w /etc/sudoers** + **vi /etc/sudoers** + + Add the following content below **`# User privilege specification`** in the **sudoers** file. + **HwHiAiUser ALL=(ALL:ALL) ALL** + + ![](https://images.gitee.com/uploads/images/2020/1128/121157_37d3b82d_7401379.png "Screenshot.png") + Run the following commands to remove the write permission on the **/etc/sudoers** file and switch to the common user: + **chmod u-w /etc/sudoers** + **exit** + +3. Connect the Atlas 200 DK to the Internet. + + **sudo vi /etc/netplan/01-netcfg.yaml** + Set the following parameters. + **Note: The configuration of both Netplan and Python is indentation-sensitive.** + + ``` + network: + version: 2 + # renderer: NetworkManager + renderer: networkd + ethernets: + eth0: + dhcp4: yes + + usb0: + dhcp4: no + addresses: [192.168.1.2/24] + gateway4: 192.168.0.1 + ``` + Connect the Atlas 200 DK to the Internet with a network cable, and run the following command for the configuration to take effect: + **sudo netplan apply** + +4. Update the apt source for the Atlas 200 DK. + + **Select either of the following two sources. If the source fails to be updated, replace it with the Ubuntu 18.04 (ARM) source.** + +- Huawei Ubuntu 18.04 (ARM) source + + Run the following command to change the source. + **sudo wget -O /etc/apt/sources.list https://repo.huaweicloud.com/repository/conf/Ubuntu-Ports-bionic.list --no-check-certificate** + + Update the sources. + **sudo apt-get update** + +- Ubuntu 18.04 (ARM) source + + Modify the source file. 
+ **sudo vi /etc/apt/sources.list** + + Replace the source file content with the following Ubuntu (ARM) sources: + ``` + deb http://ports.ubuntu.com/ bionic main restricted universe multiverse + deb-src http://ports.ubuntu.com/ bionic main restricted universe multiverse + deb http://ports.ubuntu.com/ bionic-updates main restricted universe multiverse + deb-src http://ports.ubuntu.com/ bionic-updates main restricted universe multiverse + deb http://ports.ubuntu.com/ bionic-security main restricted universe multiverse + deb-src http://ports.ubuntu.com/ bionic-security main restricted universe multiverse + deb http://ports.ubuntu.com/ bionic-backports main restricted universe multiverse + deb-src http://ports.ubuntu.com/ bionic-backports main restricted universe multiverse + deb http://ports.ubuntu.com/ubuntu-ports/ bionic main universe restricted + deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main universe restricted + ``` + + Update the sources. + **sudo apt-get update** + +5. Add environment variables to the operating environment to run the project. + 1. Open the **.bashrc** file. + **vim ~/.bashrc** + + Add the following environment variables to the file. + **export LD_LIBRARY_PATH=/home/HwHiAiUser/ascend_ddk/arm/lib:/home/HwHiAiUser/Ascend/acllib/lib64:\$LD_LIBRARY_PATH** + + **export PYTHONPATH=/home/HwHiAiUser/Ascend/pyACL/python/site-packages/acl:\$PYTHONPATH** + + Save the configuration and exit. + **wq!** + + 2. Make the configuration take effect. + **source ~/.bashrc** + diff --git a/cplusplus/environment/prepare_ENV/README_300_CN.md b/cplusplus/environment/prepare_ENV/README_300_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..eada0a8cad510b6f3987f9d6bc9f7e90037f8671 --- /dev/null +++ b/cplusplus/environment/prepare_ENV/README_300_CN.md @@ -0,0 +1,114 @@ +中文|[English](README_300_EN.md) + +# 基础环境配置 +本文的目的是进行基础环境配置,包含sudo权限配置、apt源配置、环境变量配置。如已配置,均可跳过。 + +$\color{red}{以下命令在开发环境上执行,以普通用户为HwHiAiUser为例,请根据实际情况进行修改。}$ + + +1. 
给HwHiAiUser用户配置sudo权限 + + 切换为root用户 + **su root** + + 给sudoers文件配置写权限,并打开该文件 + **chmod u+w /etc/sudoers** + **vi /etc/sudoers** + + 在该文件 `# User privilege specification` 下面增加如下内容: + **HwHiAiUser ALL=(ALL:ALL) ALL** + + ![输入图片说明](https://images.gitee.com/uploads/images/2020/1128/144046_7c02d0d0_7401379.png "屏幕截图.png") + + 完成后,执行以下命令取消`/etc/sudoers`文件的写权限 + **chmod u-w /etc/sudoers** + 切换回普通用户 + **exit** + +2. apt源配置 + + 配置ubuntu18.04-x86的apt清华源 + + **sudo vi /etc/apt/sources.list** + + 将源文件内容替换为以下ubuntu18.04-x86的apt清华源 + + ``` + # 默认注释了源码镜像以提高 apt update 速度,如有需要可自行取消注释 + deb https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic main restricted universe multiverse + # deb-src https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic main restricted universe multiverse + deb https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-updates main restricted universe multiverse + # deb-src https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-updates main restricted universe multiverse + deb https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-backports main restricted universe multiverse + # deb-src https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-backports main restricted universe multiverse + deb https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-security main restricted universe multiverse + # deb-src https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-security main restricted universe multiverse + ``` + 执行以下命令更新源 + **sudo apt-get update** +

+ >![输入图片说明](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "屏幕截图.png") **说明:** + > **如果sudo apt-get update失败,可以试用其他的国内源 https://www.cnblogs.com/dream4567/p/9690850.html** + + +3. 在开发环境中添加以下环境变量,用于atc模型转换 + + 1. 打开.bashrc文件 + **vim ~/.bashrc** + + 在文件中添加以下环境变量 + **export install_path=\\$HOME/Ascend/ascend-toolkit/latest** + + **export PATH=/usr/local/python3.7.5/bin:\\${install_path}/atc/ccec_compiler/bin:\\${install_path}/atc/bin:\\$PATH** + + **export ASCEND_OPP_PATH=\\${install_path}/opp** + + **export LD_LIBRARY_PATH=\\${install_path}/atc/lib64** + + 其中,PYTHONPATH环境变量在20.0和20.1版本不兼容。请按照CANN版本选择对应方式执行命令添加PYTHONPATH环境变量。 + + - 20.0版本 + + **export PYTHONPATH=\\${install_path}/atc/python/site-packages/te:\\${install_path}/atc/python/site-packages/topi:\\$PYTHONPATH** + + - 20.1版本 + + **export PYTHONPATH=\\${install_path}/atc/python/site-packages:\\${install_path}/atc/python/site-packages/auto_tune.egg/auto_tune:\\${install_path}/atc/python/site-packages/schedule_search.egg:$PYTHONPATH** + + 保存退出 + **wq!** + >![输入图片说明](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "屏幕截图.png") **说明:** + >**- install_path 请根据实际情况修改。** + >**- 若开发环境与运行环境合一部署,请勿配置LD_LIBRARY_PATH,在运行样例时,会跟运行环境的LD_LIBRARY_PATH有冲突。** + + 2. 执行如下命令使环境变量生效 + **source ~/.bashrc** + +$\color{red}{以下命令在运行环境上执行}$ +1. 登录运行环境 + +2. 在运行环境添加环境变量,用于运行工程。 + 1. 
打开.bashrc文件 + **vim ~/.bashrc** + + 在文件中添加以下环境变量 + + **export PYTHONPATH=\\$HOME/Ascend/nnrt/latest/pyACL/python/site-packages/acl:$PYTHONPATH** + + 其中,LD_LIBRARY_PATH环境变量在20.0和20.1版本不兼容。请按照CANN版本选择对应方式执行命令添加LD_LIBRARY_PATH环境变量。 + - 20.0版本 + + **export LD_LIBRARY_PATH=\\$HOME/ascend_ddk/x86/lib:\\$HOME/Ascend/nnrt/latest/acllib_linux.x86_64/lib64:$LD_LIBRARY_PATH** + + - 20.1版本 + + **export LD_LIBRARY_PATH=\\$HOME/ascend_ddk/x86/lib:\\$HOME/Ascend/nnrt/latest/acllib/lib64:$LD_LIBRARY_PATH** + + 保存退出 + **wq!** + >![输入图片说明](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "屏幕截图.png") **说明:** + > **此处的环境变量配置是以CANN软件包使用非root用户安装为例。** + + 2. 执行如下命令使环境变量生效。 + **source ~/.bashrc** \ No newline at end of file diff --git a/cplusplus/environment/prepare_ENV/README_300_EN.md b/cplusplus/environment/prepare_ENV/README_300_EN.md new file mode 100644 index 0000000000000000000000000000000000000000..9b13800d4f18667c11f458b1725bd37873b54157 --- /dev/null +++ b/cplusplus/environment/prepare_ENV/README_300_EN.md @@ -0,0 +1,113 @@ +English|[中文](README_300_CN.md) + +# Basic Environment Configuration +This readme file describes how to configure the basic environment, including the sudo permission, apt source, and environment variables. If they have been configured, skip this workflow. + + **Run the following commands as a common user in the development environment. The following takes **HwHiAiUser** as an example. Replace it with the actual running user.** + + +1. Grant the sudo permission to the **HwHiAiUser** user. + + + Switch to the **root** user. + **su root** + + Grant the write permission on the **sudoers** file and open the file. + **chmod u+w /etc/sudoers** + **vi /etc/sudoers** + + Add the following content below **`# User privilege specification`** in the **sudoers** file. 
+ **HwHiAiUser ALL=(ALL:ALL) ALL** + + ![](https://images.gitee.com/uploads/images/2020/1128/144046_7c02d0d0_7401379.png "Screenshot.png") + + Remove the write permission on the **/etc/sudoers** file. + **chmod u-w /etc/sudoers** + Switch to the common user. + **exit** + +2. Configure the apt sources. + + Configure the Tsinghua apt sources of Ubuntu 18.04 (x86). + + **sudo vi /etc/apt/sources.list** + + Replace the source file content with the following Tsinghua apt sources. + + ``` + # By default, the source code images are commented out to speed up the apt update. You can uncomment them as required. + deb https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic main restricted universe multiverse + # deb-src https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic main restricted universe multiverse + deb https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-updates main restricted universe multiverse + # deb-src https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-updates main restricted universe multiverse + deb https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-backports main restricted universe multiverse + # deb-src https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-backports main restricted universe multiverse + deb https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-security main restricted universe multiverse + # deb-src https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic-security main restricted universe multiverse + ``` + Update the sources. + **sudo apt-get update** + >![](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "screenshot.png") **NOTE** + > **If the source fails to be updated, you can try a new source available at https://www.cnblogs.com/dream4567/p/9690850.html**. + + +3. Add the following environment variables to the development environment for ATC model conversion. + + 1. Open the **.bashrc** file. + **vim ~/.bashrc** + + Add the following environment variables to the file. 
+ **export install_path=\$HOME/Ascend/ascend-toolkit/latest** + + **export PATH=/usr/local/python3.7.5/bin:\\${install_path}/atc/ccec_compiler/bin:\\${install_path}/atc/bin:\\$PATH** + + **export ASCEND_OPP_PATH=\${install_path}/opp** + + **export LD_LIBRARY_PATH=\${install_path}/atc/lib64** + + The environment variable ***PYTHONPATH*** is incompatible with CANN 20.0 and 20.1. Run the corresponding command to add ***PYTHONPATH*** based on the CANN version. + + - For CANN 20.0 + + **export PYTHONPATH=\\${install_path}/atc/python/site-packages/te:\\${install_path}/atc/python/site-packages/topi:\\$PYTHONPATH** + + - For CANN 20.1 + + **export PYTHONPATH=\\${install_path}/atc/python/site-packages:\\${install_path}/atc/python/site-packages/auto_tune.egg/auto_tune:\\${install_path}/atc/python/site-packages/schedule_search.egg:\\$PYTHONPATH** + + Save the configuration and exit. + **wq!** + >![](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "screenshot.png") **NOTE** + >**- Replace install_path with the actual installation path.** + >**- If the development environment and operating environment are set up on the same server, do not configure LD_LIBRARY_PATH, avoiding the conflict with LD_LIBRARY_PATH in the operating environment when running the sample.** + + 2. Make the configuration take effect. + **source ~/.bashrc** + +Run the following commands in the operating environment. +1. Log in to the operating environment. + +2. Add environment variables to the operating environment to run the project. + 1. Open the **.bashrc** file. + **vim ~/.bashrc** + + Add the following environment variables to the file. + + **export PYTHONPATH=\\$HOME/Ascend/nnrt/latest/pyACL/python/site-packages/acl:\\$PYTHONPATH** + + The environment variable ***LD_LIBRARY_PATH*** is incompatible with CANN 20.0 and 20.1. Run the corresponding command to add the ***LD_LIBRARY_PATH*** environment variable based on the CANN version. 
+ - For CANN 20.0 + + **export LD_LIBRARY_PATH=\\$HOME/ascend_ddk/x86/lib:\\$HOME/Ascend/nnrt/latest/acllib_linux.x86_64/lib64:\\$LD_LIBRARY_PATH** + + - For CANN 20.1 + + **export LD_LIBRARY_PATH=\\$HOME/ascend_ddk/x86/lib:\\$HOME/Ascend/nnrt/latest/acllib/lib64:\\$LD_LIBRARY_PATH** + + Save the configuration and exit. + **wq!** + >![](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "screenshot.png") **NOTE** + > **In this example, the CANN software package is installed by a non-root user.** + + 2. Make the configuration take effect. + **source ~/.bashrc** \ No newline at end of file diff --git a/cplusplus/environment/presenteragent_install/README_200DK_CN.md b/cplusplus/environment/presenteragent_install/README_200DK_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..a6c5885d79747ba9246de0eb17a46fb65762b7eb --- /dev/null +++ b/cplusplus/environment/presenteragent_install/README_200DK_CN.md @@ -0,0 +1,61 @@ +中文|[English](README_200DK_EN.md) + +# 安装Presenter Agent +$\color{red}{以下命令在开发环境上用安装开发套件包的用户执行}$ +1. 安装autoconf、automake、libtool依赖 + **sudo apt-get install autoconf automake libtool python3-pip** +2. 安装python库 + + **python3.6 -m pip install --upgrade pip --user -i https://mirrors.huaweicloud.com/repository/pypi/simple** + **python3.6 -m pip install tornado==5.1.0 protobuf Cython numpy --user -i https://mirrors.huaweicloud.com/repository/pypi/simple** + **python3.7.5 -m pip install tornado==5.1.0 protobuf Cython numpy --user -i https://mirrors.huaweicloud.com/repository/pypi/simple** + + >![输入图片说明](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "屏幕截图.png") **说明:** + > **若Python包安装失败,可以试用其他源 https://bbs.huaweicloud.com/forum/thread-97632-1-1.html 或不加-i 参数使用默认pip源** +3. 
安装protobuf + - 开发环境未安装在Atlas200DK上,需要交叉编译protobuf + **cd \$HOME** + **git clone -b 3.8.x https://gitee.com/mirrors/protobufsource.git protobuf** + **cp -r protobuf protobuf_arm** + **cd protobuf** + **./autogen.sh** + **bash configure** + **make -j8** + **sudo make install** + **cd \$HOME/protobuf_arm** + **./autogen.sh** + **./configure --build=x86_64-linux-gnu --host=aarch64-linux-gnu --with-protoc=protoc --prefix=$HOME/ascend_ddk/arm** + **make -j8** + **make install** + + - 开发环境安装在Atlas200DK上,只需编译一次protobuf + **cd \$HOME** + **git clone -b 3.8.x https://gitee.com/mirrors/protobufsource.git protobuf** + **cd protobuf** + **./autogen.sh** + **./configure --prefix=$HOME/ascend_ddk/arm** + **make -j8** + **sudo make install** + +4. 编译并安装Presenter Agent + + 设置环境变量,在命令行内执行 + export DDK_PATH=\$HOME/Ascend/ascend-toolkit/latest/**_ARCH_** + >![输入图片说明](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "屏幕截图.png") **说明:** + >- 请将\$HOME/Ascend/ascend-toolkit/latest替换为ACLlib安装包的实际安装路径。 + >- 若版本为20.0,请将 **ARCH** 替换为arm64-linux_gcc7.3.0;若版本为20.1,请将 **ARCH** 替换为arm64-linux。 + + 下载Presenter Agent源码 + **cd \$HOME** + **git clone https://gitee.com/ascend/samples.git** + **cd \$HOME/samples/cplusplus/common/presenteragent/** + + 安装Presenter Agent + **make -j8** + **make install** + +5. 将编译好的so传到运行环境 + **scp \$HOME/ascend_ddk/arm/lib/libpr\* HwHiAiUser@192.168.1.2:/home/HwHiAiUser/ascend_ddk/arm/lib/** + + + diff --git a/cplusplus/environment/presenteragent_install/README_200DK_EN.md b/cplusplus/environment/presenteragent_install/README_200DK_EN.md new file mode 100644 index 0000000000000000000000000000000000000000..2a1e838c1864756b4b1e193c89acfca482def517 --- /dev/null +++ b/cplusplus/environment/presenteragent_install/README_200DK_EN.md @@ -0,0 +1,60 @@ +English|[中文](README_200DK_CN.md) + +# Installing Presenter Agent + **Run the following commands in the development environment as the user who installs Toolkit.** +1. 
Install the Autoconf, Automake, and Libtool dependencies. + **sudo apt-get install autoconf automake libtool python3-pip** +2. Install Python libraries. + + **python3.6 -m pip install --upgrade pip --user -i https://mirrors.huaweicloud.com/repository/pypi/simple** + **python3.6 -m pip install tornado==5.1.0 protobuf Cython numpy --user -i https://mirrors.huaweicloud.com/repository/pypi/simple** + **python3.7.5 -m pip install tornado==5.1.0 protobuf Cython numpy --user -i https://mirrors.huaweicloud.com/repository/pypi/simple** + + >![](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "screenshot.png") **NOTE** + >**If Python fails to be installed, click [here](https://bbs.huaweicloud.com/forum/thread-97632-1-1.html) to try a new source. Alternatively, use the default pip source by removing the -i option from the command.** +3. Install Protobuf. + - If the development environment is not set up on the Atlas 200 DK, cross compilation is required and you need to compile Protobuf twice. + **cd $HOME** + **git clone -b 3.8.x https://gitee.com/mirrors/protobufsource.git protobuf** + **cp -r protobuf protobuf_arm** + **cd protobuf** + **./autogen.sh** + **bash configure** + **make -j8** + **sudo make install** + **cd $HOME/protobuf_arm** + **./autogen.sh** + **./configure --build=x86_64-linux-gnu --host=aarch64-linux-gnu --with-protoc=protoc --prefix=\$HOME/ascend_ddk/arm** + **make -j8** + **make install** + - If the development environment is set up on the Atlas 200 DK, you only need to compile Protobuf once. + **cd \$HOME** + **git clone -b 3.8.x https://gitee.com/mirrors/protobufsource.git protobuf** + **cd protobuf** + **./autogen.sh** + **./configure --prefix=$HOME/ascend_ddk/arm** + **make -j8** + **sudo make install** + +4. Build and install Presenter Agent. + + Set the environment variable and run the following command on the command line. 
+ **export DDK_PATH=$HOME/Ascend/ascend-toolkit/latest/_ARCH_** + >![](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "screenshot.png") **NOTE** + >- Replace ***$HOME/Ascend/ascend-toolkit/latest*** with the actual ACLlib installation path. + >- For CANN 20.0, replace ***ARCH*** with **arm64-linux_gcc7.3.0**. For CANN 20.1, replace ***ARCH*** with **arm64-linux**. + Download the Presenter Agent source code. + + **cd $HOME** + **git clone https://gitee.com/ascend/samples.git** + **cd $HOME/samples/cplusplus/common/presenteragent/** + + Install Presenter Agent. + **make -j8** + **make install** + +5. Upload the built .so file to the operating environment. + **scp $HOME/ascend_ddk/arm/lib/libpr\* HwHiAiUser@192.168.1.2:/home/HwHiAiUser/ascend_ddk/arm/lib/** + + + diff --git a/cplusplus/environment/presenteragent_install/README_300_CN.md b/cplusplus/environment/presenteragent_install/README_300_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..00f8ccb0f7572f2c813d38252b5e55c5b11606ca --- /dev/null +++ b/cplusplus/environment/presenteragent_install/README_300_CN.md @@ -0,0 +1,49 @@ +中文|[English](README_300_EN.md) +# 安装Presenter Agent +$\color{red}{以下命令在开发环境上用安装开发套件包的用户执行}$ + +1. 安装autoconf、automake、libtool依赖 + **sudo apt-get install autoconf automake libtool python3-pip** +2. 安装python库 + + **python3.6 -m pip install --upgrade pip --user -i https://mirrors.huaweicloud.com/repository/pypi/simple** + **python3.6 -m pip install tornado==5.1.0 protobuf Cython numpy --user -i https://mirrors.huaweicloud.com/repository/pypi/simple** + **python3.7.5 -m pip install tornado==5.1.0 protobuf Cython numpy --user -i https://mirrors.huaweicloud.com/repository/pypi/simple** + + >![输入图片说明](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "屏幕截图.png") **说明:** + > **若Python包安装失败,可以试用其他源 https://bbs.huaweicloud.com/forum/thread-97632-1-1.html 或不加-i 参数使用默认pip源** +3. 
安装protobuf + **git clone -b 3.8.x https://gitee.com/mirrors/protobufsource.git protobuf** + **cd protobuf** + **./autogen.sh** + **./configure --prefix=\$HOME/ascend_ddk/x86** + **make -j8** + **make install** + +4. 编译并安装Presenter Agent。 + 设置下环境变量,在命令行内执行。 + export DDK_PATH=$HOME/Ascend/ascend-toolkit/latest/**_ARCH_** + >![](public_sys-resources/icon-note.gif) **说明:** + 请将$HOME/Ascend/ascend-toolkit/latest替换为ACLlib安装包的实际安装路径。 + 若版本为20.0,请将 **ARCH** 替换为x86_64-linux_gcc7.3.0;若版本为20.1,请将 **ARCH** 替换为x86_64-linux。 + + 下载Presenter Agent源码 + **cd \$HOME** + **git clone https://gitee.com/ascend/samples.git** + **cd \$HOME/samples/cplusplus/common/presenteragent/** + + 安装Presenter Agent。 + **make mode=ASIC -j8** + **make install mode=ASIC** + + 5. 将编译好的so传到运行环境。(如开发环境和运行环境安装在同一服务器,请忽略此步) + **scp $HOME/ascend_ddk/x86/lib/libpr\* HwHiAiUser@_IP_:/home/HwHiAiUser/ascend_ddk/x86/lib** + + + >![](public_sys-resources/icon-note.gif) **说明:** + 请将IP替换为ai1s云端推理环境的公网ip地址,HwHiAiUser替换为实际的运行用户。 + + + + + diff --git a/cplusplus/environment/presenteragent_install/README_300_EN.md b/cplusplus/environment/presenteragent_install/README_300_EN.md new file mode 100644 index 0000000000000000000000000000000000000000..492e27e18bc6be5650b428d320d8c0be43e2293e --- /dev/null +++ b/cplusplus/environment/presenteragent_install/README_300_EN.md @@ -0,0 +1,48 @@ +English|[中文](README_300_CN.md) +# Installing Presenter Agent + **Run the following commands in the development environment as the user who installs Toolkit.** + +1. Install the Autoconf, Automake, and Libtool dependencies. + **sudo apt-get install autoconf automake libtool python3-pip** +2. Install Python libraries. 
+ + **python3.6 -m pip install --upgrade pip --user -i https://mirrors.huaweicloud.com/repository/pypi/simple** + **python3.6 -m pip install tornado==5.1.0 protobuf Cython numpy --user -i https://mirrors.huaweicloud.com/repository/pypi/simple** + **python3.7.5 -m pip install tornado==5.1.0 protobuf Cython numpy --user -i https://mirrors.huaweicloud.com/repository/pypi/simple** + + >![](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "screenshot.png") **NOTE** + >**If Python fails to be installed, click [here](https://bbs.huaweicloud.com/forum/thread-97632-1-1.html) to try a new source. Alternatively, use the default pip source by removing the -i option from the command.** +3. Install Protobuf. + **git clone -b 3.8.x https://gitee.com/mirrors/protobufsource.git protobuf** + **cd protobuf** + **./autogen.sh** + **./configure --prefix=$HOME/ascend_ddk/x86** + **make -j8** + **make install** + +4. Build and install Presenter Agent. + Set the environment variable and run the following command on the CLI. + **export DDK_PATH=$HOME/Ascend/ascend-toolkit/latest/_ARCH_** + >![](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "screenshot.png") **NOTE** + >- Replace ***$HOME/Ascend/ascend-toolkit/latest*** with the actual ACLlib installation path. + >- For CANN 20.0, replace ***ARCH*** with **x86_64-linux_gcc7.3.0**. For CANN 20.1, replace ***ARCH*** with **x86_64-linux**. + Download the Presenter Agent source code. + + **cd $HOME** + **git clone https://gitee.com/ascend/samples.git** + **cd $HOME/samples/cplusplus/common/presenteragent/** + + Install Presenter Agent. + **make mode=ASIC -j8** + **make install mode=ASIC** + + 5. Upload the built .so file to the operating environment. (If the development environment and operating environment are installed on the same server, skip this step.) 
+ **scp $HOME/ascend_ddk/x86/lib/libpr\* HwHiAiUser@_IP_:/home/HwHiAiUser/ascend_ddk/x86/lib** + ​ + >![](https://images.gitee.com/uploads/images/2020/1130/162342_1d7d35d7_7401379.png "screenshot.png") **NOTE** + >Replace the IP address with the public IP address of the Atlas 300 (AI1s cloud-based inference environment) and replace **HwHiAiUser** with the actual running user. + + + + + diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/.build_project b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/.build_project new file mode 100644 index 0000000000000000000000000000000000000000..6a31d7d6e19eb8f1671e8de7ce7d69ae96311fb6 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/.build_project @@ -0,0 +1 @@ +[{"name":"Build-Configuration","type":"AscendAppBuild","lastBuild":true,"buildProperties":{"HOST_OS":"Linux","HOST_ARCH":"aarch64","TARGET_TYPE":"SOC"}}] \ No newline at end of file diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/.project b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/.project new file mode 100644 index 0000000000000000000000000000000000000000..d084459f127fd8c60003cb28a7b6a8d8cdcfd564 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/.project @@ -0,0 +1,8 @@ +{ + "type": "Ascend ACL App", + "project_type": "Custom", + "project_desc": "", + "target": "", + "target_id": "", + "adk_version": "1.75.22.0.220" +} \ No newline at end of file diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/.vscode/settings.json b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/.vscode/settings.json new file mode 100644 index 
0000000000000000000000000000000000000000..2c0c448139a32f61b79d9cb25f2f7ca101a65f1c --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/.vscode/settings.json @@ -0,0 +1,57 @@ +{ + "files.associations": { + "regex": "cpp", + "array": "cpp", + "bitset": "cpp", + "string_view": "cpp", + "initializer_list": "cpp", + "utility": "cpp", + "string": "cpp", + "atomic": "cpp", + "*.tcc": "cpp", + "cctype": "cpp", + "chrono": "cpp", + "clocale": "cpp", + "cmath": "cpp", + "condition_variable": "cpp", + "cstdarg": "cpp", + "cstddef": "cpp", + "cstdint": "cpp", + "cstdio": "cpp", + "cstdlib": "cpp", + "cstring": "cpp", + "ctime": "cpp", + "cwchar": "cpp", + "cwctype": "cpp", + "deque": "cpp", + "unordered_map": "cpp", + "unordered_set": "cpp", + "vector": "cpp", + "exception": "cpp", + "algorithm": "cpp", + "iterator": "cpp", + "map": "cpp", + "memory": "cpp", + "memory_resource": "cpp", + "optional": "cpp", + "ratio": "cpp", + "set": "cpp", + "system_error": "cpp", + "tuple": "cpp", + "type_traits": "cpp", + "fstream": "cpp", + "iosfwd": "cpp", + "iostream": "cpp", + "istream": "cpp", + "limits": "cpp", + "mutex": "cpp", + "new": "cpp", + "ostream": "cpp", + "sstream": "cpp", + "stdexcept": "cpp", + "streambuf": "cpp", + "thread": "cpp", + "cinttypes": "cpp", + "typeinfo": "cpp" + } +} \ No newline at end of file diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/CMakeLists.txt b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..5866dbe3b8b8aa53dcd1e4b8ed91ec1c0ac145da --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/CMakeLists.txt @@ -0,0 +1,9 @@ +# Copyright (c) Huawei Technologies Co., Ltd. 2019. All rights reserved. 
+ +# CMake lowest version requirement +cmake_minimum_required(VERSION 3.5.1) + +# project information +project(sample-facedetection) + +add_subdirectory("./src") diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/README_CN.md b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/README_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..dab91436a3a23d786972243ec9aa105a1ae3eebe --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/README_CN.md @@ -0,0 +1,160 @@ +中文|[English](README_EN.md) + +**本样例为大家学习昇腾软件栈提供参考,非商业目的!** + +**本样例适配20.1及以上版本,支持产品为Atlas200DK。** + +**本README只提供命令行方式运行样例的指导,如需在Mindstudio下运行样例,请参考[Mindstudio运行视频样例wiki](https://gitee.com/ascend/samples/wikis/Mindstudio%E8%BF%90%E8%A1%8C%E8%A7%86%E9%A2%91%E6%A0%B7%E4%BE%8B?sort_id=3170138)。** + +## 人脸识别样例 + +功能:通过摄像头对视频中的人脸信息进行预测,与已注册的人脸进行比对,预测出最可能的用户。 + +样例输入:摄像头。 + +样例输出:presenter界面展现推理结果。 + +### 前提条件 + +部署此Sample前,需要准备好以下环境: + +- 请确认已按照[环境准备和依赖安装](../../../../environment)准备好环境。 + +- 已完成对应产品的开发环境和运行环境安装。 + +### 软件准备 + +1. 获取源码包。 + + 可以使用以下两种方式下载,请选择其中一种进行源码准备。 + + - 命令行方式下载(下载时间较长,但步骤简单)。 + 开发环境,非root用户命令行中执行以下命令下载源码仓。 + **cd $HOME** + **git clone https://gitee.com/ascend/samples.git** + + - 压缩包方式下载(下载时间较短,但步骤稍微复杂)。 + 1. samples仓右上角选择 **克隆/下载** 下拉框并选择 **下载ZIP**。 + 2. 将ZIP包上传到开发环境中的普通用户家目录中,例如 **$HOME/ascend-samples-master.zip**。 + 3. 开发环境中,执行以下命令,解压zip包。 + **cd $HOME** + **unzip ascend-samples-master.zip** + +2. 
获取此应用中所需要的原始网络模型。 + + 参考下表获取此应用中所用到的原始网络模型及其对应的权重文件,并将其存放到开发环境普通用户下该样例的model文件夹中,本例为:$HOME/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/model。 + + | **模型名称** | **模型说明** | **模型下载路径** | + |---|---|---| + | face_detection| 人脸检测网络模型。是基于Caffe的Resnet10-SSD300模型转换后的网络模型。 | 请参考[https://gitee.com/ascend/modelzoo/tree/master/contrib/TensorFlow/Research/cv/facedetection/ATC_resnet10-SSD_caffe_AE](https://gitee.com/ascend/modelzoo/tree/master/contrib/TensorFlow/Research/cv/facedetection/ATC_resnet10-SSD_caffe_AE)目录中README.md下载原始模型章节下载模型和权重文件。 | + | vanillacnn| 人脸特征点标记网络模型。是基于Caffe的VanillaCNN模型转换后的网络模型。 | 请参考[https://gitee.com/ascend/modelzoo/tree/master/contrib/TensorFlow/Research/cv/vanillacnn/ATC_vanillacnn_caffe_AE](https://gitee.com/ascend/modelzoo/tree/master/contrib/TensorFlow/Research/cv/vanillacnn/ATC_vanillacnn_caffe_AE)目录中README.md下载原始模型章节下载模型和权重文件。 | + | sphereface| 特征向量获取网络模型。是基于Caffe的SphereFace模型转换后的网络模型。| 请参考[https://gitee.com/ascend/modelzoo/tree/master/contrib/TensorFlow/Research/cv/sphereface/ATC_sphereface_caffe_AE](https://gitee.com/ascend/modelzoo/tree/master/contrib/TensorFlow/Research/cv/sphereface/ATC_sphereface_caffe_AE)目录中README.md下载原始模型章节下载模型和权重文件。 | + + ![](https://images.gitee.com/uploads/images/2020/1106/160652_6146f6a4_5395865.gif "icon-note.gif") **说明:** + > - modelzoo中提供了转换好的om模型,但此模型不匹配当前样例,所以需要下载原始模型和权重文件后重新进行模型转换。 + +3. 将原始模型转换为Davinci模型。 + + **注:请确认环境变量已经在[环境准备和依赖安装](../../../../environment)中配置完成** + + 1. 设置LD_LIBRARY_PATH环境变量。 + + 由于LD_LIBRARY_PATH环境变量在转使用atc工具和运行样例时会产生冲突,所以需要在命令行单独设置此环境变量,方便修改。 + + **export LD_LIBRARY_PATH=\\${install_path}/atc/lib64** + + 2. 
执行以下命令下载aipp配置文件并使用atc命令进行模型转换。 + + **cd $HOME/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/model** + + **wget https://c7xcode.obs.cn-north-4.myhuaweicloud.com/models/face_recognition_camera/face_detection_insert_op.cfg** + + **atc --input_shape="data:1,3,300,300" --weight="./face_detection_fp32.caffemodel" --input_format=NCHW --output="./face_detection" --soc_version=Ascend310 --insert_op_conf=./face_detection_insert_op.cfg --framework=0 --model="./face_detection.prototxt"** + + 按照同样的方式将vanillacnn,sphereface也进行模型转换。 + + **atc --input_shape="data:4,3,40,40" --weight="./vanillacnn.caffemodel" --input_format=NCHW --output="./vanillacnn" --soc_version=Ascend310 --framework=0 --model="./vanilla_deploy.prototxt"** + + **wget https://c7xcode.obs.cn-north-4.myhuaweicloud.com/models/face_recognition_camera/sphereface_insert_op.cfg** + + **atc --input_shape="data:8,3,112,96" --weight="./sphereface.caffemodel" --input_format=NCHW --output="./sphereface" --soc_version=Ascend310 --insert_op_conf=./sphereface_insert_op.cfg --framework=0 --model="./sphereface.prototxt"** + +### 样例部署 + +1. 修改present相关配置文件。 + + 将样例目录下**scripts/param.conf**中的 presenter_server_ip、presenter_view_ip 修改为开发环境中可以ping通运行环境的ip地址。 + 1. 开发环境中使用ifconfig查看可用ip。 + 2. 在开发环境中将**scripts/param.conf**中的 presenter_server_ip、presenter_view_ip 修改为该ip地址。 + ![](https://images.gitee.com/uploads/images/2020/1106/160652_6146f6a4_5395865.gif "icon-note.gif") **说明:** + > - 1.开发环境和运行环境分离部署,一般使用配置的虚拟网卡ip,例如192.168.1.223。 + > - 2.开发环境和运行环境合一部署,一般使用200dk固定ip,例如192.168.1.2。 + +2. 开发环境命令行中设置编译依赖的环境变量。 + + **export DDK_PATH=$HOME/Ascend/ascend-toolkit/latest/arm64-linux** + + **export NPU_HOST_LIB=$DDK_PATH/acllib/lib64/stub** + +3. 
切换到face_recognition_camera目录,创建目录用于存放编译文件,例如,本文中,创建的目录为 **build/intermediates/host**。 + + **cd $HOME/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera** + + **mkdir -p build/intermediates/host** + +4. 切换到 **build/intermediates/host** 目录,执行cmake生成编译文件。 + + **cd build/intermediates/host** + **make clean** + **cmake \.\./\.\./\.\./src -DCMAKE_CXX_COMPILER=aarch64-linux-gnu-g++ -DCMAKE_SKIP_RPATH=TRUE** + +5. 执行make命令,生成的可执行文件main在 **face_recognition_camera/out** 目录下。 + + **make** + + +### 样例运行 + +![](https://images.gitee.com/uploads/images/2020/1106/160652_6146f6a4_5395865.gif "icon-note.gif") **说明:** +> - 以下出现的**xxx.xxx.xxx.xxx**为运行环境ip,200DK在USB连接时一般为192.168.1.2。 + +1. 执行以下命令,将开发环境的 **face_recognition_camera** 目录上传到运行环境中,例如 **/home/HwHiAiUser**。 + + **开发环境与运行环境合一部署,请跳过此步骤!** + + **scp -r $HOME/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/ HwHiAiUser@xxx.xxx.xxx.xxx:/home/HwHiAiUser** + +2. 启动presenterserver并登录运行环境。 + + 1. 开发环境中执行以下命令启动presentserver。 + **cd $HOME/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/** + **bash scripts/run_presenter_server.sh** + 2. 执行以下命令登录运行环境。 + **开发环境与运行环境合一部署,请跳过此步骤!** + **ssh HwHiAiUser@xxx.xxx.xxx.xxx** + + ![](https://images.gitee.com/uploads/images/2020/1106/160652_6146f6a4_5395865.gif "icon-note.gif") **说明:** + > - 当提示“Please input a absolute path to storage facial recognition data:“时,请输入MindStudio中存储人脸注册数据及解析数据,此路径MindStudio用户需要有读写权限,如果此路径不存在,脚本会自动创建。 + + +3. 运行可执行文件。 + + - 如果是开发环境与运行环境合一部署,执行以下命令,设置运行环境变量,并切换目录。 + **export LD_LIBRARY_PATH=** + **source ~/.bashrc** + **cd $HOME/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out** + + - 如果是开发环境与运行环境分离部署,执行以下命令切换目录。 + **cd $HOME/face_recognition_camera/out** + + 切换目录后,执行以下命令运行样例。 + + **./main** + +### 查看结果 + +1. 
打开presentserver网页界面,打开启动Presenter Server服务时提示的URL即可。 + +2. 等待Presenter Agent传输数据给服务端,单击“Refresh“刷新,当有数据时相应的Channel 的Status变成绿色。 + +3. 单击右侧对应的View Name链接,查看运行结果。 \ No newline at end of file diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/README_EN.md b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/README_EN.md new file mode 100644 index 0000000000000000000000000000000000000000..c27237b067177dc898287e0ad48afd219910e2dc --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/README_EN.md @@ -0,0 +1,175 @@ +English|[中文](README_CN.md) + +**The following sample provides reference for you to learn the Ascend AI Software Stack and cannot be used for commercial purposes.** + +**The sample applies to CANN 20.1 and later versions and supports Atlas 200 DK.** + +**This README provides only guidance for running samples in command line mode. For details about how to run samples in MindStudio, see [Running Video Samples in MindStudio](https://gitee.com/ascend/samples/wikis/Mindstudio%E8%BF%90%E8%A1%8C%E8%A7%86%E9%A2%91%E6%A0%B7%E4%BE%8B?sort_id=3170138).** + +## Facial Recognition Sample + +Function: Predict the face information in a video through the camera and compare it with the registered face to predict the most possible user. + +Input: Camera + +Output: Inference result displayed on Presenter + +### Prerequisites + +Before deploying this sample, ensure that: + +- The environment has been prepared according to [Environment Preparation and Dependency Installation](../../../../environment). + +- The development environment and operating environment of the corresponding product have been installed. + +### Preparing Software + +1. Obtain the source code package. 
+ + You can use either of the following methods to download the source code: + + - Command line (The download takes a long time, but the procedure is simple.) + In the development environment, run the following commands as a non-root user to download the source code repository: + + **cd $HOME** + + **git clone https://gitee.com/ascend/samples.git** + + - Compressed package (The download time is short, but the procedure is complex.) + 1. Click **Clone or download** in the upper right corner of the samples repository and select **Download ZIP**. + 2. Upload the .zip package to the home directory of a common user in the development environment, for example, **$HOME/ascend-samples-master.zip**. + 3. In the development environment, run the following commands to decompress the .zip package: + + **cd $HOME** + **unzip ascend-samples-master.zip** + +2. Obtain the source network model required by the application. + + Obtain the original network model and its weight file used in the application by referring to the following table and store them in any directory of a common user in the development environment, for example, **$HOME/models/face_recognition_camera**. + + | **Model Name** | **Description** | **How to Obtain** | + |---|---|---| + | face_detection| Face detection network model converted from the Caffe-based Resnet10-SSD300 model. | Download the original model and weight file by referring to [https://gitee.com/ascend/modelzoo/tree/master/contrib/TensorFlow/Research/cv/facedetection/ATC_resnet10-SSD_caffe_AE](https://gitee.com/ascend/modelzoo/tree/master/contrib/TensorFlow/Research/cv/facedetection/ATC_resnet10-SSD_caffe_AE). | + | vanillacnn| Network model for marking facial feature points. It is a network model converted from the VanillaCNN model based on Caffe. 
| Download the original model and weight file by referring to [https://gitee.com/ascend/modelzoo/tree/master/contrib/TensorFlow/Research/cv/vanillacnn/ATC_vanillacnn_caffe_AE](https://gitee.com/ascend/modelzoo/tree/master/contrib/TensorFlow/Research/cv/vanillacnn/ATC_vanillacnn_caffe_AE). | + | sphereface| Network model for obtaining feature vectors. It is a network model converted from the SphereFace model based on Caffe. | Download the original model and weight file by referring to [https://gitee.com/ascend/modelzoo/tree/master/contrib/TensorFlow/Research/cv/sphereface/ATC_sphereface_caffe_AE](https://gitee.com/ascend/modelzoo/tree/master/contrib/TensorFlow/Research/cv/sphereface/ATC_sphereface_caffe_AE). | + + ![](https://images.gitee.com/uploads/images/2020/1106/160652_6146f6a4_5395865.gif "icon-note.gif") **Note** + + > - The converted OM model provided by ModelZoo does not match the current sample. Therefore, you need to download the original model and weight file to convert the model by yourself. + +3. Convert the original model to a Da Vinci model. + + **Note: Ensure that the environment variables have been configured in [Environment Preparation and Dependency Installation](../../../../environment).** + + 1. Set the **LD_LIBRARY_PATH** environment variable. + + The **LD_LIBRARY_PATH** environment variable conflicts with the sample when the ATC tool is used. Therefore, you need to set this environment variable in the command line to facilitate modification. + + **export LD_LIBRARY_PATH=\\${install_path}/atc/lib64** + + 2. 
Run the following command to download the AIPP configuration file and run the ATC command to convert the model: + + **cd $HOME/models/face_recognition_camera** + + **wget https://c7xcode.obs.cn-north-4.myhuaweicloud.com/models/face_recognition_camera/face_detection_insert_op.cfg** + + **atc --input_shape="data:1,3,300,300" --weight="./face_detection_fp32.caffemodel" --input_format=NCHW --output="./face_detection" --soc_version=Ascend310 --insert_op_conf=./face_detection_insert_op.cfg --framework=0 --model="./face_detection.prototxt"** + + Use the same method to convert the VanillaCNN and SphereFace models. + + **atc --input_shape="data:4,3,40,40" --weight="./vanillacnn.caffemodel" --input_format=NCHW --output="./vanillacnn" --soc_version=Ascend310 --framework=0 --model="./vanilla_deploy.prototxt"** + + **wget https://c7xcode.obs.cn-north-4.myhuaweicloud.com/models/face_recognition_camera/sphereface_insert_op.cfg** + + **atc --input_shape="data:8,3,112,96" --weight="./sphereface.caffemodel" --input_format=NCHW --output="./sphereface" --soc_version=Ascend310 --insert_op_conf=./sphereface_insert_op.cfg --framework=0 --model="./sphereface.prototxt"** + + 3. Run the following command to copy the converted model to the **model** folder of the sample. + + **cp ./face_detection.om $HOME/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/model/** + + **cp ./vanillacnn.om $HOME/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/model/** + + **cp ./sphereface.om $HOME/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/model/** + +### Deploying the Sample + +1. Modify Presenter-related configuration files. + + Change **presenter_server_ip** and **presenter_view_ip** in **script/param.conf** in the sample directory to the IP addresses that can ping the operating environment in the development environment. + + 1. 
In the development environment, run the ifconfig command to view available IP addresses. + 2. In the development environment, change **presenter_server_ip** and **presenter_view_ip** in **script/param.conf** to the available IP addresses. + + ![](https://images.gitee.com/uploads/images/2020/1106/160652_6146f6a4_5395865.gif "icon-note.gif") **Note** + + > - 1. If the development environment and operating environment are deployed on separate servers, the configured virtual NIC IP address is used, for example, 192.168.1.223. + > - 2. If the development environment and operating environment are deployed on the same server, the fixed IP address of Atlas 200 DK is used, for example, 192.168.1.2. + +2. Set the environment variables on which compilation depends in the command line of the development environment. + + **export DDK_PATH=$HOME/Ascend/ascend-toolkit/latest/arm64-linux** + + **export NPU_HOST_LIB=$DDK_PATH/acllib/lib64/stub** + +3. Go to the **face_recognition_camera** directory and create a directory for storing build outputs. For example, the directory created in this sample is **build/intermediates/host**. + + **cd $HOME/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera** + + **mkdir -p build/intermediates/host** + +4. Go to the **build/intermediates/host** directory and run the cmake command. + + **cd build/intermediates/host** + **make clean** + **cmake \.\./\.\./\.\./src -DCMAKE_CXX_COMPILER=aarch64-linux-gnu-g++ -DCMAKE_SKIP_RPATH=TRUE** + +5. Run the make command to generate an executable file **main** in the **face_recognition_camera/out** directory. + + **make** + +### Running the Sample + +![](https://images.gitee.com/uploads/images/2020/1106/160652_6146f6a4_5395865.gif "icon-note.gif") **Note** + +> - In the following information, **xxx.xxx.xxx.xxx** indicates the IP address of the operating environment. The IP address of Atlas 200 DK is 192.168.1.2 when the USB is connected. + +1. 
Run the following command to upload the **face_recognition_camera** directory in the development environment to the operating environment, for example, **/home/HwHiAiUser**: + + **If the development environment and operating environment are deployed on the same server, skip this step.** + + **scp -r $HOME/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/ HwHiAiUser@xxx.xxx.xxx.xxx:/home/HwHiAiUser** + +2. Start the Presenter Server and log in to the operating environment. + + 1. Run the following command in the development environment to start the Presenter Server: + **cd $HOME/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/** + **bash script/run_presenter_server.sh** + 2. Run the following command to log in to the operating environment: + **If the development environment and operating environment are deployed on the same server, skip this step.** + **ssh HwHiAiUser@xxx.xxx.xxx.xxx** + + ![](https://images.gitee.com/uploads/images/2020/1106/160652_6146f6a4_5395865.gif "icon-note.gif") **Note** + + > - When the message "Please input a absolute path to storage facial recognition data:" is displayed, enter the path for storing face registration data and parsing data in MindStudio. The MindStudio user must have the read and write permissions. If the path does not exist, the script will automatically create it. + +3. Run the executable file. 
+ + - If the development environment and operating environment are deployed on the same server, run the following commands to set the operating environment variables and switch the directory: + **export LD_LIBRARY_PATH=** + **source ~/.bashrc** + **cd $HOME/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out** + + - If the development environment and operating environment are deployed on separate servers, run the following command to switch the directory: + **cd $HOME/face_recognition_camera/out** + + Switch to the directory and run the following command to run the sample: + + **./main** + +### Checking the Result + +1. Open the Presenter Server WebUI. Open the URL that is displayed when the Presenter Server service is started. + +2. Wait for Presenter Agent to transmit data to the server. Click Refresh. When there is data, the icon in the Status column for the corresponding channel changes to green. + +3. Click the View Name link on the right to view the result. diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeCache.txt b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeCache.txt new file mode 100644 index 0000000000000000000000000000000000000000..6513e6f7667af93bc937d35377fa324def9796cf --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeCache.txt @@ -0,0 +1,330 @@ +# This is the CMakeCache file. +# For build in directory: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host +# It was generated by CMake: /usr/bin/cmake +# You can edit this file to change values found and used by cmake. +# If you do not want to change any of the values, simply exit the editor. 
+# If you do want to change a value, simply edit, save, and exit the editor. +# The syntax for the file is as follows: +# KEY:TYPE=VALUE +# KEY is the name of a variable in the cache. +# TYPE is a hint to GUIs for the type of VALUE, DO NOT EDIT TYPE!. +# VALUE is the current value for the KEY. + +######################## +# EXTERNAL cache entries +######################## + +//Path to a program. +CMAKE_AR:FILEPATH=/usr/bin/ar + +//Choose the type of build, options are: None(CMAKE_CXX_FLAGS or +// CMAKE_C_FLAGS used) Debug Release RelWithDebInfo MinSizeRel. +CMAKE_BUILD_TYPE:STRING= + +//Enable/Disable color output during build. +CMAKE_COLOR_MAKEFILE:BOOL=ON + +//CXX compiler +CMAKE_CXX_COMPILER:STRING=/usr/bin/aarch64-linux-gnu-g++ + +//A wrapper around 'ar' adding the appropriate '--plugin' option +// for the GCC compiler +CMAKE_CXX_COMPILER_AR:FILEPATH=/usr/bin/gcc-ar-7 + +//A wrapper around 'ranlib' adding the appropriate '--plugin' option +// for the GCC compiler +CMAKE_CXX_COMPILER_RANLIB:FILEPATH=/usr/bin/gcc-ranlib-7 + +//Flags used by the compiler during all build types. +CMAKE_CXX_FLAGS:STRING= + +//Flags used by the compiler during debug builds. +CMAKE_CXX_FLAGS_DEBUG:STRING=-g + +//Flags used by the compiler during release builds for minimum +// size. +CMAKE_CXX_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG + +//Flags used by the compiler during release builds. +CMAKE_CXX_FLAGS_RELEASE:STRING=-O3 -DNDEBUG + +//Flags used by the compiler during release builds with debug info. +CMAKE_CXX_FLAGS_RELWITHDEBINFO:STRING=-O2 -g -DNDEBUG + +//C compiler +CMAKE_C_COMPILER:FILEPATH=/usr/bin/cc + +//A wrapper around 'ar' adding the appropriate '--plugin' option +// for the GCC compiler +CMAKE_C_COMPILER_AR:FILEPATH=/usr/bin/gcc-ar-7 + +//A wrapper around 'ranlib' adding the appropriate '--plugin' option +// for the GCC compiler +CMAKE_C_COMPILER_RANLIB:FILEPATH=/usr/bin/gcc-ranlib-7 + +//Flags used by the compiler during all build types. 
+CMAKE_C_FLAGS:STRING= + +//Flags used by the compiler during debug builds. +CMAKE_C_FLAGS_DEBUG:STRING=-g + +//Flags used by the compiler during release builds for minimum +// size. +CMAKE_C_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG + +//Flags used by the compiler during release builds. +CMAKE_C_FLAGS_RELEASE:STRING=-O3 -DNDEBUG + +//Flags used by the compiler during release builds with debug info. +CMAKE_C_FLAGS_RELWITHDEBINFO:STRING=-O2 -g -DNDEBUG + +//Flags used by the linker. +CMAKE_EXE_LINKER_FLAGS:STRING= + +//Flags used by the linker during debug builds. +CMAKE_EXE_LINKER_FLAGS_DEBUG:STRING= + +//Flags used by the linker during release minsize builds. +CMAKE_EXE_LINKER_FLAGS_MINSIZEREL:STRING= + +//Flags used by the linker during release builds. +CMAKE_EXE_LINKER_FLAGS_RELEASE:STRING= + +//Flags used by the linker during Release with Debug Info builds. +CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO:STRING= + +//Enable/Disable output of compile commands during generation. +CMAKE_EXPORT_COMPILE_COMMANDS:BOOL=OFF + +//Install path prefix, prepended onto install directories. +CMAKE_INSTALL_PREFIX:PATH=/usr/local + +//Path to a program. +CMAKE_LINKER:FILEPATH=/usr/bin/ld + +//Path to a program. +CMAKE_MAKE_PROGRAM:FILEPATH=/usr/bin/make + +//Flags used by the linker during the creation of modules. +CMAKE_MODULE_LINKER_FLAGS:STRING= + +//Flags used by the linker during debug builds. +CMAKE_MODULE_LINKER_FLAGS_DEBUG:STRING= + +//Flags used by the linker during release minsize builds. +CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL:STRING= + +//Flags used by the linker during release builds. +CMAKE_MODULE_LINKER_FLAGS_RELEASE:STRING= + +//Flags used by the linker during Release with Debug Info builds. +CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO:STRING= + +//Path to a program. +CMAKE_NM:FILEPATH=/usr/bin/nm + +//Path to a program. +CMAKE_OBJCOPY:FILEPATH=/usr/bin/objcopy + +//Path to a program. 
+CMAKE_OBJDUMP:FILEPATH=/usr/bin/objdump + +//Value Computed by CMake +CMAKE_PROJECT_NAME:STATIC=objectdetection_camera + +//Path to a program. +CMAKE_RANLIB:FILEPATH=/usr/bin/ranlib + +//Flags used by the linker during the creation of dll's. +CMAKE_SHARED_LINKER_FLAGS:STRING= + +//Flags used by the linker during debug builds. +CMAKE_SHARED_LINKER_FLAGS_DEBUG:STRING= + +//Flags used by the linker during release minsize builds. +CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL:STRING= + +//Flags used by the linker during release builds. +CMAKE_SHARED_LINKER_FLAGS_RELEASE:STRING= + +//Flags used by the linker during Release with Debug Info builds. +CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO:STRING= + +//If set, runtime paths are not added when installing shared libraries, +// but are added when building. +CMAKE_SKIP_INSTALL_RPATH:BOOL=NO + +//If set, runtime paths are not added when using shared libraries. +CMAKE_SKIP_RPATH:BOOL=TRUE + +//Flags used by the linker during the creation of static libraries. +CMAKE_STATIC_LINKER_FLAGS:STRING= + +//Flags used by the linker during debug builds. +CMAKE_STATIC_LINKER_FLAGS_DEBUG:STRING= + +//Flags used by the linker during release minsize builds. +CMAKE_STATIC_LINKER_FLAGS_MINSIZEREL:STRING= + +//Flags used by the linker during release builds. +CMAKE_STATIC_LINKER_FLAGS_RELEASE:STRING= + +//Flags used by the linker during Release with Debug Info builds. +CMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO:STRING= + +//Path to a program. +CMAKE_STRIP:FILEPATH=/usr/bin/strip + +//If this value is on, makefiles will be generated without the +// .SILENT directive, and all commands will be echoed to the console +// during the make. This is useful for debugging only. With Visual +// Studio IDE projects all commands are done without /nologo. 
+CMAKE_VERBOSE_MAKEFILE:BOOL=FALSE + +//Value Computed by CMake +objectdetection_camera_BINARY_DIR:STATIC=/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host + +//Value Computed by CMake +objectdetection_camera_SOURCE_DIR:STATIC=/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src + + +######################## +# INTERNAL cache entries +######################## + +//ADVANCED property for variable: CMAKE_AR +CMAKE_AR-ADVANCED:INTERNAL=1 +//This is the directory where this CMakeCache.txt was created +CMAKE_CACHEFILE_DIR:INTERNAL=/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host +//Major version of cmake used to create the current loaded cache +CMAKE_CACHE_MAJOR_VERSION:INTERNAL=3 +//Minor version of cmake used to create the current loaded cache +CMAKE_CACHE_MINOR_VERSION:INTERNAL=10 +//Patch version of cmake used to create the current loaded cache +CMAKE_CACHE_PATCH_VERSION:INTERNAL=2 +//ADVANCED property for variable: CMAKE_COLOR_MAKEFILE +CMAKE_COLOR_MAKEFILE-ADVANCED:INTERNAL=1 +//Path to CMake executable. +CMAKE_COMMAND:INTERNAL=/usr/bin/cmake +//Path to cpack program executable. +CMAKE_CPACK_COMMAND:INTERNAL=/usr/bin/cpack +//Path to ctest program executable. 
+CMAKE_CTEST_COMMAND:INTERNAL=/usr/bin/ctest +//ADVANCED property for variable: CMAKE_CXX_COMPILER +CMAKE_CXX_COMPILER-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_CXX_COMPILER_AR +CMAKE_CXX_COMPILER_AR-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_CXX_COMPILER_RANLIB +CMAKE_CXX_COMPILER_RANLIB-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_CXX_FLAGS +CMAKE_CXX_FLAGS-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_CXX_FLAGS_DEBUG +CMAKE_CXX_FLAGS_DEBUG-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_CXX_FLAGS_MINSIZEREL +CMAKE_CXX_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELEASE +CMAKE_CXX_FLAGS_RELEASE-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELWITHDEBINFO +CMAKE_CXX_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_C_COMPILER +CMAKE_C_COMPILER-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_C_COMPILER_AR +CMAKE_C_COMPILER_AR-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_C_COMPILER_RANLIB +CMAKE_C_COMPILER_RANLIB-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_C_FLAGS +CMAKE_C_FLAGS-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_C_FLAGS_DEBUG +CMAKE_C_FLAGS_DEBUG-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_C_FLAGS_MINSIZEREL +CMAKE_C_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_C_FLAGS_RELEASE +CMAKE_C_FLAGS_RELEASE-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_C_FLAGS_RELWITHDEBINFO +CMAKE_C_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 +//Executable file format +CMAKE_EXECUTABLE_FORMAT:INTERNAL=ELF +//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS +CMAKE_EXE_LINKER_FLAGS-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_DEBUG +CMAKE_EXE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: 
CMAKE_EXE_LINKER_FLAGS_MINSIZEREL +CMAKE_EXE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELEASE +CMAKE_EXE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO +CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_EXPORT_COMPILE_COMMANDS +CMAKE_EXPORT_COMPILE_COMMANDS-ADVANCED:INTERNAL=1 +//Name of external makefile project generator. +CMAKE_EXTRA_GENERATOR:INTERNAL= +//Name of generator. +CMAKE_GENERATOR:INTERNAL=Unix Makefiles +//Name of generator platform. +CMAKE_GENERATOR_PLATFORM:INTERNAL= +//Name of generator toolset. +CMAKE_GENERATOR_TOOLSET:INTERNAL= +//Source directory with the top level CMakeLists.txt file for this +// project +CMAKE_HOME_DIRECTORY:INTERNAL=/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src +//Install .so files without execute permission. 
+CMAKE_INSTALL_SO_NO_EXE:INTERNAL=1 +//ADVANCED property for variable: CMAKE_LINKER +CMAKE_LINKER-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_MAKE_PROGRAM +CMAKE_MAKE_PROGRAM-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS +CMAKE_MODULE_LINKER_FLAGS-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_DEBUG +CMAKE_MODULE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL +CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELEASE +CMAKE_MODULE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO +CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_NM +CMAKE_NM-ADVANCED:INTERNAL=1 +//number of local generators +CMAKE_NUMBER_OF_MAKEFILES:INTERNAL=1 +//ADVANCED property for variable: CMAKE_OBJCOPY +CMAKE_OBJCOPY-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_OBJDUMP +CMAKE_OBJDUMP-ADVANCED:INTERNAL=1 +//Platform information initialized +CMAKE_PLATFORM_INFO_INITIALIZED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_RANLIB +CMAKE_RANLIB-ADVANCED:INTERNAL=1 +//Path to CMake installation. 
+CMAKE_ROOT:INTERNAL=/usr/share/cmake-3.10 +//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS +CMAKE_SHARED_LINKER_FLAGS-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_DEBUG +CMAKE_SHARED_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL +CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELEASE +CMAKE_SHARED_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO +CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_SKIP_INSTALL_RPATH +CMAKE_SKIP_INSTALL_RPATH-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_SKIP_RPATH +CMAKE_SKIP_RPATH-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS +CMAKE_STATIC_LINKER_FLAGS-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_DEBUG +CMAKE_STATIC_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_MINSIZEREL +CMAKE_STATIC_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_RELEASE +CMAKE_STATIC_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO +CMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_STRIP +CMAKE_STRIP-ADVANCED:INTERNAL=1 +//uname command +CMAKE_UNAME:INTERNAL=/bin/uname +//ADVANCED property for variable: CMAKE_VERBOSE_MAKEFILE +CMAKE_VERBOSE_MAKEFILE-ADVANCED:INTERNAL=1 + diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CMakeCCompiler.cmake 
b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CMakeCCompiler.cmake new file mode 100644 index 0000000000000000000000000000000000000000..9e0e71d8c6b896ec560f07199f9cc974e84817a5 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CMakeCCompiler.cmake @@ -0,0 +1,73 @@ +set(CMAKE_C_COMPILER "/usr/bin/cc") +set(CMAKE_C_COMPILER_ARG1 "") +set(CMAKE_C_COMPILER_ID "GNU") +set(CMAKE_C_COMPILER_VERSION "7.5.0") +set(CMAKE_C_COMPILER_VERSION_INTERNAL "") +set(CMAKE_C_COMPILER_WRAPPER "") +set(CMAKE_C_STANDARD_COMPUTED_DEFAULT "11") +set(CMAKE_C_COMPILE_FEATURES "c_std_90;c_function_prototypes;c_std_99;c_restrict;c_variadic_macros;c_std_11;c_static_assert") +set(CMAKE_C90_COMPILE_FEATURES "c_std_90;c_function_prototypes") +set(CMAKE_C99_COMPILE_FEATURES "c_std_99;c_restrict;c_variadic_macros") +set(CMAKE_C11_COMPILE_FEATURES "c_std_11;c_static_assert") + +set(CMAKE_C_PLATFORM_ID "Linux") +set(CMAKE_C_SIMULATE_ID "") +set(CMAKE_C_SIMULATE_VERSION "") + + + +set(CMAKE_AR "/usr/bin/ar") +set(CMAKE_C_COMPILER_AR "/usr/bin/gcc-ar-7") +set(CMAKE_RANLIB "/usr/bin/ranlib") +set(CMAKE_C_COMPILER_RANLIB "/usr/bin/gcc-ranlib-7") +set(CMAKE_LINKER "/usr/bin/ld") +set(CMAKE_COMPILER_IS_GNUCC 1) +set(CMAKE_C_COMPILER_LOADED 1) +set(CMAKE_C_COMPILER_WORKS TRUE) +set(CMAKE_C_ABI_COMPILED TRUE) +set(CMAKE_COMPILER_IS_MINGW ) +set(CMAKE_COMPILER_IS_CYGWIN ) +if(CMAKE_COMPILER_IS_CYGWIN) + set(CYGWIN 1) + set(UNIX 1) +endif() + +set(CMAKE_C_COMPILER_ENV_VAR "CC") + +if(CMAKE_COMPILER_IS_MINGW) + set(MINGW 1) +endif() +set(CMAKE_C_COMPILER_ID_RUN 1) +set(CMAKE_C_SOURCE_FILE_EXTENSIONS c;m) +set(CMAKE_C_IGNORE_EXTENSIONS h;H;o;O;obj;OBJ;def;DEF;rc;RC) +set(CMAKE_C_LINKER_PREFERENCE 10) + +# Save compiler ABI information. 
+set(CMAKE_C_SIZEOF_DATA_PTR "8") +set(CMAKE_C_COMPILER_ABI "ELF") +set(CMAKE_C_LIBRARY_ARCHITECTURE "x86_64-linux-gnu") + +if(CMAKE_C_SIZEOF_DATA_PTR) + set(CMAKE_SIZEOF_VOID_P "${CMAKE_C_SIZEOF_DATA_PTR}") +endif() + +if(CMAKE_C_COMPILER_ABI) + set(CMAKE_INTERNAL_PLATFORM_ABI "${CMAKE_C_COMPILER_ABI}") +endif() + +if(CMAKE_C_LIBRARY_ARCHITECTURE) + set(CMAKE_LIBRARY_ARCHITECTURE "x86_64-linux-gnu") +endif() + +set(CMAKE_C_CL_SHOWINCLUDES_PREFIX "") +if(CMAKE_C_CL_SHOWINCLUDES_PREFIX) + set(CMAKE_CL_SHOWINCLUDES_PREFIX "${CMAKE_C_CL_SHOWINCLUDES_PREFIX}") +endif() + + + + + +set(CMAKE_C_IMPLICIT_LINK_LIBRARIES "gcc;gcc_s;c;gcc;gcc_s") +set(CMAKE_C_IMPLICIT_LINK_DIRECTORIES "/usr/lib/gcc/x86_64-linux-gnu/7;/usr/lib/x86_64-linux-gnu;/usr/lib;/lib/x86_64-linux-gnu;/lib") +set(CMAKE_C_IMPLICIT_LINK_FRAMEWORK_DIRECTORIES "") diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CMakeCXXCompiler.cmake b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CMakeCXXCompiler.cmake new file mode 100644 index 0000000000000000000000000000000000000000..183707ea33cfc6eee35b1cb8451688bafd571b23 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CMakeCXXCompiler.cmake @@ -0,0 +1,75 @@ +set(CMAKE_CXX_COMPILER "/usr/bin/aarch64-linux-gnu-g++") +set(CMAKE_CXX_COMPILER_ARG1 "") +set(CMAKE_CXX_COMPILER_ID "GNU") +set(CMAKE_CXX_COMPILER_VERSION "7.5.0") +set(CMAKE_CXX_COMPILER_VERSION_INTERNAL "") +set(CMAKE_CXX_COMPILER_WRAPPER "") +set(CMAKE_CXX_STANDARD_COMPUTED_DEFAULT "14") +set(CMAKE_CXX_COMPILE_FEATURES 
"cxx_std_98;cxx_template_template_parameters;cxx_std_11;cxx_alias_templates;cxx_alignas;cxx_alignof;cxx_attributes;cxx_auto_type;cxx_constexpr;cxx_decltype;cxx_decltype_incomplete_return_types;cxx_default_function_template_args;cxx_defaulted_functions;cxx_defaulted_move_initializers;cxx_delegating_constructors;cxx_deleted_functions;cxx_enum_forward_declarations;cxx_explicit_conversions;cxx_extended_friend_declarations;cxx_extern_templates;cxx_final;cxx_func_identifier;cxx_generalized_initializers;cxx_inheriting_constructors;cxx_inline_namespaces;cxx_lambdas;cxx_local_type_template_args;cxx_long_long_type;cxx_noexcept;cxx_nonstatic_member_init;cxx_nullptr;cxx_override;cxx_range_for;cxx_raw_string_literals;cxx_reference_qualified_functions;cxx_right_angle_brackets;cxx_rvalue_references;cxx_sizeof_member;cxx_static_assert;cxx_strong_enums;cxx_thread_local;cxx_trailing_return_types;cxx_unicode_literals;cxx_uniform_initialization;cxx_unrestricted_unions;cxx_user_literals;cxx_variadic_macros;cxx_variadic_templates;cxx_std_14;cxx_aggregate_default_initializers;cxx_attribute_deprecated;cxx_binary_literals;cxx_contextual_conversions;cxx_decltype_auto;cxx_digit_separators;cxx_generic_lambdas;cxx_lambda_init_captures;cxx_relaxed_constexpr;cxx_return_type_deduction;cxx_variable_templates;cxx_std_17") +set(CMAKE_CXX98_COMPILE_FEATURES "cxx_std_98;cxx_template_template_parameters") +set(CMAKE_CXX11_COMPILE_FEATURES 
"cxx_std_11;cxx_alias_templates;cxx_alignas;cxx_alignof;cxx_attributes;cxx_auto_type;cxx_constexpr;cxx_decltype;cxx_decltype_incomplete_return_types;cxx_default_function_template_args;cxx_defaulted_functions;cxx_defaulted_move_initializers;cxx_delegating_constructors;cxx_deleted_functions;cxx_enum_forward_declarations;cxx_explicit_conversions;cxx_extended_friend_declarations;cxx_extern_templates;cxx_final;cxx_func_identifier;cxx_generalized_initializers;cxx_inheriting_constructors;cxx_inline_namespaces;cxx_lambdas;cxx_local_type_template_args;cxx_long_long_type;cxx_noexcept;cxx_nonstatic_member_init;cxx_nullptr;cxx_override;cxx_range_for;cxx_raw_string_literals;cxx_reference_qualified_functions;cxx_right_angle_brackets;cxx_rvalue_references;cxx_sizeof_member;cxx_static_assert;cxx_strong_enums;cxx_thread_local;cxx_trailing_return_types;cxx_unicode_literals;cxx_uniform_initialization;cxx_unrestricted_unions;cxx_user_literals;cxx_variadic_macros;cxx_variadic_templates") +set(CMAKE_CXX14_COMPILE_FEATURES "cxx_std_14;cxx_aggregate_default_initializers;cxx_attribute_deprecated;cxx_binary_literals;cxx_contextual_conversions;cxx_decltype_auto;cxx_digit_separators;cxx_generic_lambdas;cxx_lambda_init_captures;cxx_relaxed_constexpr;cxx_return_type_deduction;cxx_variable_templates") +set(CMAKE_CXX17_COMPILE_FEATURES "cxx_std_17") + +set(CMAKE_CXX_PLATFORM_ID "Linux") +set(CMAKE_CXX_SIMULATE_ID "") +set(CMAKE_CXX_SIMULATE_VERSION "") + + + +set(CMAKE_AR "/usr/bin/ar") +set(CMAKE_CXX_COMPILER_AR "/usr/bin/gcc-ar-7") +set(CMAKE_RANLIB "/usr/bin/ranlib") +set(CMAKE_CXX_COMPILER_RANLIB "/usr/bin/gcc-ranlib-7") +set(CMAKE_LINKER "/usr/bin/ld") +set(CMAKE_COMPILER_IS_GNUCXX 1) +set(CMAKE_CXX_COMPILER_LOADED 1) +set(CMAKE_CXX_COMPILER_WORKS TRUE) +set(CMAKE_CXX_ABI_COMPILED TRUE) +set(CMAKE_COMPILER_IS_MINGW ) +set(CMAKE_COMPILER_IS_CYGWIN ) +if(CMAKE_COMPILER_IS_CYGWIN) + set(CYGWIN 1) + set(UNIX 1) +endif() + +set(CMAKE_CXX_COMPILER_ENV_VAR "CXX") + +if(CMAKE_COMPILER_IS_MINGW) + 
set(MINGW 1) +endif() +set(CMAKE_CXX_COMPILER_ID_RUN 1) +set(CMAKE_CXX_IGNORE_EXTENSIONS inl;h;hpp;HPP;H;o;O;obj;OBJ;def;DEF;rc;RC) +set(CMAKE_CXX_SOURCE_FILE_EXTENSIONS C;M;c++;cc;cpp;cxx;mm;CPP) +set(CMAKE_CXX_LINKER_PREFERENCE 30) +set(CMAKE_CXX_LINKER_PREFERENCE_PROPAGATES 1) + +# Save compiler ABI information. +set(CMAKE_CXX_SIZEOF_DATA_PTR "8") +set(CMAKE_CXX_COMPILER_ABI "ELF") +set(CMAKE_CXX_LIBRARY_ARCHITECTURE "aarch64-linux-gnu") + +if(CMAKE_CXX_SIZEOF_DATA_PTR) + set(CMAKE_SIZEOF_VOID_P "${CMAKE_CXX_SIZEOF_DATA_PTR}") +endif() + +if(CMAKE_CXX_COMPILER_ABI) + set(CMAKE_INTERNAL_PLATFORM_ABI "${CMAKE_CXX_COMPILER_ABI}") +endif() + +if(CMAKE_CXX_LIBRARY_ARCHITECTURE) + set(CMAKE_LIBRARY_ARCHITECTURE "aarch64-linux-gnu") +endif() + +set(CMAKE_CXX_CL_SHOWINCLUDES_PREFIX "") +if(CMAKE_CXX_CL_SHOWINCLUDES_PREFIX) + set(CMAKE_CL_SHOWINCLUDES_PREFIX "${CMAKE_CXX_CL_SHOWINCLUDES_PREFIX}") +endif() + + + + + +set(CMAKE_CXX_IMPLICIT_LINK_LIBRARIES "stdc++;m;gcc_s;gcc;c;gcc_s;gcc") +set(CMAKE_CXX_IMPLICIT_LINK_DIRECTORIES "/usr/lib/gcc-cross/aarch64-linux-gnu/7;/usr/aarch64-linux-gnu/lib;/lib/aarch64-linux-gnu;/lib;/usr/lib/aarch64-linux-gnu;/usr/lib") +set(CMAKE_CXX_IMPLICIT_LINK_FRAMEWORK_DIRECTORIES "") diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CMakeDetermineCompilerABI_C.bin b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CMakeDetermineCompilerABI_C.bin new file mode 100644 index 0000000000000000000000000000000000000000..11a81f89210470f202bc1fddbbb3d2a45f3d9fd9 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CMakeDetermineCompilerABI_C.bin differ diff --git 
a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CMakeDetermineCompilerABI_CXX.bin b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CMakeDetermineCompilerABI_CXX.bin new file mode 100644 index 0000000000000000000000000000000000000000..57f9777afd3977d6c599e4bf9d91fdd24251c6b1 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CMakeDetermineCompilerABI_CXX.bin differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CMakeSystem.cmake b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CMakeSystem.cmake new file mode 100644 index 0000000000000000000000000000000000000000..e49bf5189ecd13415de719baf720aa143f5e2446 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CMakeSystem.cmake @@ -0,0 +1,15 @@ +set(CMAKE_HOST_SYSTEM "Linux-5.4.0-65-generic") +set(CMAKE_HOST_SYSTEM_NAME "Linux") +set(CMAKE_HOST_SYSTEM_VERSION "5.4.0-65-generic") +set(CMAKE_HOST_SYSTEM_PROCESSOR "x86_64") + + + +set(CMAKE_SYSTEM "Linux-5.4.0-65-generic") +set(CMAKE_SYSTEM_NAME "Linux") +set(CMAKE_SYSTEM_VERSION "5.4.0-65-generic") +set(CMAKE_SYSTEM_PROCESSOR "x86_64") + +set(CMAKE_CROSSCOMPILING "FALSE") + +set(CMAKE_SYSTEM_LOADED 1) diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CompilerIdC/CMakeCCompilerId.c 
b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CompilerIdC/CMakeCCompilerId.c new file mode 100644 index 0000000000000000000000000000000000000000..722faa803f6df8628261ccd5da97b74fa64c0114 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CompilerIdC/CMakeCCompilerId.c @@ -0,0 +1,598 @@ +#ifdef __cplusplus +# error "A C++ compiler has been selected for C." +#endif + +#if defined(__18CXX) +# define ID_VOID_MAIN +#endif +#if defined(__CLASSIC_C__) +/* cv-qualifiers did not exist in K&R C */ +# define const +# define volatile +#endif + + +/* Version number components: V=Version, R=Revision, P=Patch + Version date components: YYYY=Year, MM=Month, DD=Day */ + +#if defined(__INTEL_COMPILER) || defined(__ICC) +# define COMPILER_ID "Intel" +# if defined(_MSC_VER) +# define SIMULATE_ID "MSVC" +# endif + /* __INTEL_COMPILER = VRP */ +# define COMPILER_VERSION_MAJOR DEC(__INTEL_COMPILER/100) +# define COMPILER_VERSION_MINOR DEC(__INTEL_COMPILER/10 % 10) +# if defined(__INTEL_COMPILER_UPDATE) +# define COMPILER_VERSION_PATCH DEC(__INTEL_COMPILER_UPDATE) +# else +# define COMPILER_VERSION_PATCH DEC(__INTEL_COMPILER % 10) +# endif +# if defined(__INTEL_COMPILER_BUILD_DATE) + /* __INTEL_COMPILER_BUILD_DATE = YYYYMMDD */ +# define COMPILER_VERSION_TWEAK DEC(__INTEL_COMPILER_BUILD_DATE) +# endif +# if defined(_MSC_VER) + /* _MSC_VER = VVRR */ +# define SIMULATE_VERSION_MAJOR DEC(_MSC_VER / 100) +# define SIMULATE_VERSION_MINOR DEC(_MSC_VER % 100) +# endif + +#elif defined(__PATHCC__) +# define COMPILER_ID "PathScale" +# define COMPILER_VERSION_MAJOR DEC(__PATHCC__) +# define COMPILER_VERSION_MINOR DEC(__PATHCC_MINOR__) +# if defined(__PATHCC_PATCHLEVEL__) +# define COMPILER_VERSION_PATCH DEC(__PATHCC_PATCHLEVEL__) +# endif + +#elif defined(__BORLANDC__) && 
defined(__CODEGEARC_VERSION__) +# define COMPILER_ID "Embarcadero" +# define COMPILER_VERSION_MAJOR HEX(__CODEGEARC_VERSION__>>24 & 0x00FF) +# define COMPILER_VERSION_MINOR HEX(__CODEGEARC_VERSION__>>16 & 0x00FF) +# define COMPILER_VERSION_PATCH DEC(__CODEGEARC_VERSION__ & 0xFFFF) + +#elif defined(__BORLANDC__) +# define COMPILER_ID "Borland" + /* __BORLANDC__ = 0xVRR */ +# define COMPILER_VERSION_MAJOR HEX(__BORLANDC__>>8) +# define COMPILER_VERSION_MINOR HEX(__BORLANDC__ & 0xFF) + +#elif defined(__WATCOMC__) && __WATCOMC__ < 1200 +# define COMPILER_ID "Watcom" + /* __WATCOMC__ = VVRR */ +# define COMPILER_VERSION_MAJOR DEC(__WATCOMC__ / 100) +# define COMPILER_VERSION_MINOR DEC((__WATCOMC__ / 10) % 10) +# if (__WATCOMC__ % 10) > 0 +# define COMPILER_VERSION_PATCH DEC(__WATCOMC__ % 10) +# endif + +#elif defined(__WATCOMC__) +# define COMPILER_ID "OpenWatcom" + /* __WATCOMC__ = VVRP + 1100 */ +# define COMPILER_VERSION_MAJOR DEC((__WATCOMC__ - 1100) / 100) +# define COMPILER_VERSION_MINOR DEC((__WATCOMC__ / 10) % 10) +# if (__WATCOMC__ % 10) > 0 +# define COMPILER_VERSION_PATCH DEC(__WATCOMC__ % 10) +# endif + +#elif defined(__SUNPRO_C) +# define COMPILER_ID "SunPro" +# if __SUNPRO_C >= 0x5100 + /* __SUNPRO_C = 0xVRRP */ +# define COMPILER_VERSION_MAJOR HEX(__SUNPRO_C>>12) +# define COMPILER_VERSION_MINOR HEX(__SUNPRO_C>>4 & 0xFF) +# define COMPILER_VERSION_PATCH HEX(__SUNPRO_C & 0xF) +# else + /* __SUNPRO_CC = 0xVRP */ +# define COMPILER_VERSION_MAJOR HEX(__SUNPRO_C>>8) +# define COMPILER_VERSION_MINOR HEX(__SUNPRO_C>>4 & 0xF) +# define COMPILER_VERSION_PATCH HEX(__SUNPRO_C & 0xF) +# endif + +#elif defined(__HP_cc) +# define COMPILER_ID "HP" + /* __HP_cc = VVRRPP */ +# define COMPILER_VERSION_MAJOR DEC(__HP_cc/10000) +# define COMPILER_VERSION_MINOR DEC(__HP_cc/100 % 100) +# define COMPILER_VERSION_PATCH DEC(__HP_cc % 100) + +#elif defined(__DECC) +# define COMPILER_ID "Compaq" + /* __DECC_VER = VVRRTPPPP */ +# define COMPILER_VERSION_MAJOR 
DEC(__DECC_VER/10000000) +# define COMPILER_VERSION_MINOR DEC(__DECC_VER/100000 % 100) +# define COMPILER_VERSION_PATCH DEC(__DECC_VER % 10000) + +#elif defined(__IBMC__) && defined(__COMPILER_VER__) +# define COMPILER_ID "zOS" + /* __IBMC__ = VRP */ +# define COMPILER_VERSION_MAJOR DEC(__IBMC__/100) +# define COMPILER_VERSION_MINOR DEC(__IBMC__/10 % 10) +# define COMPILER_VERSION_PATCH DEC(__IBMC__ % 10) + +#elif defined(__IBMC__) && !defined(__COMPILER_VER__) && __IBMC__ >= 800 +# define COMPILER_ID "XL" + /* __IBMC__ = VRP */ +# define COMPILER_VERSION_MAJOR DEC(__IBMC__/100) +# define COMPILER_VERSION_MINOR DEC(__IBMC__/10 % 10) +# define COMPILER_VERSION_PATCH DEC(__IBMC__ % 10) + +#elif defined(__IBMC__) && !defined(__COMPILER_VER__) && __IBMC__ < 800 +# define COMPILER_ID "VisualAge" + /* __IBMC__ = VRP */ +# define COMPILER_VERSION_MAJOR DEC(__IBMC__/100) +# define COMPILER_VERSION_MINOR DEC(__IBMC__/10 % 10) +# define COMPILER_VERSION_PATCH DEC(__IBMC__ % 10) + +#elif defined(__PGI) +# define COMPILER_ID "PGI" +# define COMPILER_VERSION_MAJOR DEC(__PGIC__) +# define COMPILER_VERSION_MINOR DEC(__PGIC_MINOR__) +# if defined(__PGIC_PATCHLEVEL__) +# define COMPILER_VERSION_PATCH DEC(__PGIC_PATCHLEVEL__) +# endif + +#elif defined(_CRAYC) +# define COMPILER_ID "Cray" +# define COMPILER_VERSION_MAJOR DEC(_RELEASE_MAJOR) +# define COMPILER_VERSION_MINOR DEC(_RELEASE_MINOR) + +#elif defined(__TI_COMPILER_VERSION__) +# define COMPILER_ID "TI" + /* __TI_COMPILER_VERSION__ = VVVRRRPPP */ +# define COMPILER_VERSION_MAJOR DEC(__TI_COMPILER_VERSION__/1000000) +# define COMPILER_VERSION_MINOR DEC(__TI_COMPILER_VERSION__/1000 % 1000) +# define COMPILER_VERSION_PATCH DEC(__TI_COMPILER_VERSION__ % 1000) + +#elif defined(__FUJITSU) || defined(__FCC_VERSION) || defined(__fcc_version) +# define COMPILER_ID "Fujitsu" + +#elif defined(__TINYC__) +# define COMPILER_ID "TinyCC" + +#elif defined(__BCC__) +# define COMPILER_ID "Bruce" + +#elif defined(__SCO_VERSION__) +# define 
COMPILER_ID "SCO" + +#elif defined(__clang__) && defined(__apple_build_version__) +# define COMPILER_ID "AppleClang" +# if defined(_MSC_VER) +# define SIMULATE_ID "MSVC" +# endif +# define COMPILER_VERSION_MAJOR DEC(__clang_major__) +# define COMPILER_VERSION_MINOR DEC(__clang_minor__) +# define COMPILER_VERSION_PATCH DEC(__clang_patchlevel__) +# if defined(_MSC_VER) + /* _MSC_VER = VVRR */ +# define SIMULATE_VERSION_MAJOR DEC(_MSC_VER / 100) +# define SIMULATE_VERSION_MINOR DEC(_MSC_VER % 100) +# endif +# define COMPILER_VERSION_TWEAK DEC(__apple_build_version__) + +#elif defined(__clang__) +# define COMPILER_ID "Clang" +# if defined(_MSC_VER) +# define SIMULATE_ID "MSVC" +# endif +# define COMPILER_VERSION_MAJOR DEC(__clang_major__) +# define COMPILER_VERSION_MINOR DEC(__clang_minor__) +# define COMPILER_VERSION_PATCH DEC(__clang_patchlevel__) +# if defined(_MSC_VER) + /* _MSC_VER = VVRR */ +# define SIMULATE_VERSION_MAJOR DEC(_MSC_VER / 100) +# define SIMULATE_VERSION_MINOR DEC(_MSC_VER % 100) +# endif + +#elif defined(__GNUC__) +# define COMPILER_ID "GNU" +# define COMPILER_VERSION_MAJOR DEC(__GNUC__) +# if defined(__GNUC_MINOR__) +# define COMPILER_VERSION_MINOR DEC(__GNUC_MINOR__) +# endif +# if defined(__GNUC_PATCHLEVEL__) +# define COMPILER_VERSION_PATCH DEC(__GNUC_PATCHLEVEL__) +# endif + +#elif defined(_MSC_VER) +# define COMPILER_ID "MSVC" + /* _MSC_VER = VVRR */ +# define COMPILER_VERSION_MAJOR DEC(_MSC_VER / 100) +# define COMPILER_VERSION_MINOR DEC(_MSC_VER % 100) +# if defined(_MSC_FULL_VER) +# if _MSC_VER >= 1400 + /* _MSC_FULL_VER = VVRRPPPPP */ +# define COMPILER_VERSION_PATCH DEC(_MSC_FULL_VER % 100000) +# else + /* _MSC_FULL_VER = VVRRPPPP */ +# define COMPILER_VERSION_PATCH DEC(_MSC_FULL_VER % 10000) +# endif +# endif +# if defined(_MSC_BUILD) +# define COMPILER_VERSION_TWEAK DEC(_MSC_BUILD) +# endif + +#elif defined(__VISUALDSPVERSION__) || defined(__ADSPBLACKFIN__) || defined(__ADSPTS__) || defined(__ADSP21000__) +# define COMPILER_ID "ADSP" 
+#if defined(__VISUALDSPVERSION__) + /* __VISUALDSPVERSION__ = 0xVVRRPP00 */ +# define COMPILER_VERSION_MAJOR HEX(__VISUALDSPVERSION__>>24) +# define COMPILER_VERSION_MINOR HEX(__VISUALDSPVERSION__>>16 & 0xFF) +# define COMPILER_VERSION_PATCH HEX(__VISUALDSPVERSION__>>8 & 0xFF) +#endif + +#elif defined(__IAR_SYSTEMS_ICC__) || defined(__IAR_SYSTEMS_ICC) +# define COMPILER_ID "IAR" +# if defined(__VER__) +# define COMPILER_VERSION_MAJOR DEC((__VER__) / 1000000) +# define COMPILER_VERSION_MINOR DEC(((__VER__) / 1000) % 1000) +# define COMPILER_VERSION_PATCH DEC((__VER__) % 1000) +# define COMPILER_VERSION_INTERNAL DEC(__IAR_SYSTEMS_ICC__) +# endif + +#elif defined(__ARMCC_VERSION) +# define COMPILER_ID "ARMCC" +#if __ARMCC_VERSION >= 1000000 + /* __ARMCC_VERSION = VRRPPPP */ + # define COMPILER_VERSION_MAJOR DEC(__ARMCC_VERSION/1000000) + # define COMPILER_VERSION_MINOR DEC(__ARMCC_VERSION/10000 % 100) + # define COMPILER_VERSION_PATCH DEC(__ARMCC_VERSION % 10000) +#else + /* __ARMCC_VERSION = VRPPPP */ + # define COMPILER_VERSION_MAJOR DEC(__ARMCC_VERSION/100000) + # define COMPILER_VERSION_MINOR DEC(__ARMCC_VERSION/10000 % 10) + # define COMPILER_VERSION_PATCH DEC(__ARMCC_VERSION % 10000) +#endif + + +#elif defined(__SDCC_VERSION_MAJOR) || defined(SDCC) +# define COMPILER_ID "SDCC" +# if defined(__SDCC_VERSION_MAJOR) +# define COMPILER_VERSION_MAJOR DEC(__SDCC_VERSION_MAJOR) +# define COMPILER_VERSION_MINOR DEC(__SDCC_VERSION_MINOR) +# define COMPILER_VERSION_PATCH DEC(__SDCC_VERSION_PATCH) +# else + /* SDCC = VRP */ +# define COMPILER_VERSION_MAJOR DEC(SDCC/100) +# define COMPILER_VERSION_MINOR DEC(SDCC/10 % 10) +# define COMPILER_VERSION_PATCH DEC(SDCC % 10) +# endif + +#elif defined(_SGI_COMPILER_VERSION) || defined(_COMPILER_VERSION) +# define COMPILER_ID "MIPSpro" +# if defined(_SGI_COMPILER_VERSION) + /* _SGI_COMPILER_VERSION = VRP */ +# define COMPILER_VERSION_MAJOR DEC(_SGI_COMPILER_VERSION/100) +# define COMPILER_VERSION_MINOR DEC(_SGI_COMPILER_VERSION/10 % 
10) +# define COMPILER_VERSION_PATCH DEC(_SGI_COMPILER_VERSION % 10) +# else + /* _COMPILER_VERSION = VRP */ +# define COMPILER_VERSION_MAJOR DEC(_COMPILER_VERSION/100) +# define COMPILER_VERSION_MINOR DEC(_COMPILER_VERSION/10 % 10) +# define COMPILER_VERSION_PATCH DEC(_COMPILER_VERSION % 10) +# endif + + +/* These compilers are either not known or too old to define an + identification macro. Try to identify the platform and guess that + it is the native compiler. */ +#elif defined(__sgi) +# define COMPILER_ID "MIPSpro" + +#elif defined(__hpux) || defined(__hpua) +# define COMPILER_ID "HP" + +#else /* unknown compiler */ +# define COMPILER_ID "" +#endif + +/* Construct the string literal in pieces to prevent the source from + getting matched. Store it in a pointer rather than an array + because some compilers will just produce instructions to fill the + array rather than assigning a pointer to a static array. */ +char const* info_compiler = "INFO" ":" "compiler[" COMPILER_ID "]"; +#ifdef SIMULATE_ID +char const* info_simulate = "INFO" ":" "simulate[" SIMULATE_ID "]"; +#endif + +#ifdef __QNXNTO__ +char const* qnxnto = "INFO" ":" "qnxnto[]"; +#endif + +#if defined(__CRAYXE) || defined(__CRAYXC) +char const *info_cray = "INFO" ":" "compiler_wrapper[CrayPrgEnv]"; +#endif + +#define STRINGIFY_HELPER(X) #X +#define STRINGIFY(X) STRINGIFY_HELPER(X) + +/* Identify known platforms by name. 
*/ +#if defined(__linux) || defined(__linux__) || defined(linux) +# define PLATFORM_ID "Linux" + +#elif defined(__CYGWIN__) +# define PLATFORM_ID "Cygwin" + +#elif defined(__MINGW32__) +# define PLATFORM_ID "MinGW" + +#elif defined(__APPLE__) +# define PLATFORM_ID "Darwin" + +#elif defined(_WIN32) || defined(__WIN32__) || defined(WIN32) +# define PLATFORM_ID "Windows" + +#elif defined(__FreeBSD__) || defined(__FreeBSD) +# define PLATFORM_ID "FreeBSD" + +#elif defined(__NetBSD__) || defined(__NetBSD) +# define PLATFORM_ID "NetBSD" + +#elif defined(__OpenBSD__) || defined(__OPENBSD) +# define PLATFORM_ID "OpenBSD" + +#elif defined(__sun) || defined(sun) +# define PLATFORM_ID "SunOS" + +#elif defined(_AIX) || defined(__AIX) || defined(__AIX__) || defined(__aix) || defined(__aix__) +# define PLATFORM_ID "AIX" + +#elif defined(__sgi) || defined(__sgi__) || defined(_SGI) +# define PLATFORM_ID "IRIX" + +#elif defined(__hpux) || defined(__hpux__) +# define PLATFORM_ID "HP-UX" + +#elif defined(__HAIKU__) +# define PLATFORM_ID "Haiku" + +#elif defined(__BeOS) || defined(__BEOS__) || defined(_BEOS) +# define PLATFORM_ID "BeOS" + +#elif defined(__QNX__) || defined(__QNXNTO__) +# define PLATFORM_ID "QNX" + +#elif defined(__tru64) || defined(_tru64) || defined(__TRU64__) +# define PLATFORM_ID "Tru64" + +#elif defined(__riscos) || defined(__riscos__) +# define PLATFORM_ID "RISCos" + +#elif defined(__sinix) || defined(__sinix__) || defined(__SINIX__) +# define PLATFORM_ID "SINIX" + +#elif defined(__UNIX_SV__) +# define PLATFORM_ID "UNIX_SV" + +#elif defined(__bsdos__) +# define PLATFORM_ID "BSDOS" + +#elif defined(_MPRAS) || defined(MPRAS) +# define PLATFORM_ID "MP-RAS" + +#elif defined(__osf) || defined(__osf__) +# define PLATFORM_ID "OSF1" + +#elif defined(_SCO_SV) || defined(SCO_SV) || defined(sco_sv) +# define PLATFORM_ID "SCO_SV" + +#elif defined(__ultrix) || defined(__ultrix__) || defined(_ULTRIX) +# define PLATFORM_ID "ULTRIX" + +#elif defined(__XENIX__) || defined(_XENIX) 
|| defined(XENIX) +# define PLATFORM_ID "Xenix" + +#elif defined(__WATCOMC__) +# if defined(__LINUX__) +# define PLATFORM_ID "Linux" + +# elif defined(__DOS__) +# define PLATFORM_ID "DOS" + +# elif defined(__OS2__) +# define PLATFORM_ID "OS2" + +# elif defined(__WINDOWS__) +# define PLATFORM_ID "Windows3x" + +# else /* unknown platform */ +# define PLATFORM_ID +# endif + +#else /* unknown platform */ +# define PLATFORM_ID + +#endif + +/* For windows compilers MSVC and Intel we can determine + the architecture of the compiler being used. This is because + the compilers do not have flags that can change the architecture, + but rather depend on which compiler is being used +*/ +#if defined(_WIN32) && defined(_MSC_VER) +# if defined(_M_IA64) +# define ARCHITECTURE_ID "IA64" + +# elif defined(_M_X64) || defined(_M_AMD64) +# define ARCHITECTURE_ID "x64" + +# elif defined(_M_IX86) +# define ARCHITECTURE_ID "X86" + +# elif defined(_M_ARM64) +# define ARCHITECTURE_ID "ARM64" + +# elif defined(_M_ARM) +# if _M_ARM == 4 +# define ARCHITECTURE_ID "ARMV4I" +# elif _M_ARM == 5 +# define ARCHITECTURE_ID "ARMV5I" +# else +# define ARCHITECTURE_ID "ARMV" STRINGIFY(_M_ARM) +# endif + +# elif defined(_M_MIPS) +# define ARCHITECTURE_ID "MIPS" + +# elif defined(_M_SH) +# define ARCHITECTURE_ID "SHx" + +# else /* unknown architecture */ +# define ARCHITECTURE_ID "" +# endif + +#elif defined(__WATCOMC__) +# if defined(_M_I86) +# define ARCHITECTURE_ID "I86" + +# elif defined(_M_IX86) +# define ARCHITECTURE_ID "X86" + +# else /* unknown architecture */ +# define ARCHITECTURE_ID "" +# endif + +#elif defined(__IAR_SYSTEMS_ICC__) || defined(__IAR_SYSTEMS_ICC) +# if defined(__ICCARM__) +# define ARCHITECTURE_ID "ARM" + +# elif defined(__ICCAVR__) +# define ARCHITECTURE_ID "AVR" + +# else /* unknown architecture */ +# define ARCHITECTURE_ID "" +# endif +#else +# define ARCHITECTURE_ID +#endif + +/* Convert integer to decimal digit literals. 
*/ +#define DEC(n) \ + ('0' + (((n) / 10000000)%10)), \ + ('0' + (((n) / 1000000)%10)), \ + ('0' + (((n) / 100000)%10)), \ + ('0' + (((n) / 10000)%10)), \ + ('0' + (((n) / 1000)%10)), \ + ('0' + (((n) / 100)%10)), \ + ('0' + (((n) / 10)%10)), \ + ('0' + ((n) % 10)) + +/* Convert integer to hex digit literals. */ +#define HEX(n) \ + ('0' + ((n)>>28 & 0xF)), \ + ('0' + ((n)>>24 & 0xF)), \ + ('0' + ((n)>>20 & 0xF)), \ + ('0' + ((n)>>16 & 0xF)), \ + ('0' + ((n)>>12 & 0xF)), \ + ('0' + ((n)>>8 & 0xF)), \ + ('0' + ((n)>>4 & 0xF)), \ + ('0' + ((n) & 0xF)) + +/* Construct a string literal encoding the version number components. */ +#ifdef COMPILER_VERSION_MAJOR +char const info_version[] = { + 'I', 'N', 'F', 'O', ':', + 'c','o','m','p','i','l','e','r','_','v','e','r','s','i','o','n','[', + COMPILER_VERSION_MAJOR, +# ifdef COMPILER_VERSION_MINOR + '.', COMPILER_VERSION_MINOR, +# ifdef COMPILER_VERSION_PATCH + '.', COMPILER_VERSION_PATCH, +# ifdef COMPILER_VERSION_TWEAK + '.', COMPILER_VERSION_TWEAK, +# endif +# endif +# endif + ']','\0'}; +#endif + +/* Construct a string literal encoding the internal version number. */ +#ifdef COMPILER_VERSION_INTERNAL +char const info_version_internal[] = { + 'I', 'N', 'F', 'O', ':', + 'c','o','m','p','i','l','e','r','_','v','e','r','s','i','o','n','_', + 'i','n','t','e','r','n','a','l','[', + COMPILER_VERSION_INTERNAL,']','\0'}; +#endif + +/* Construct a string literal encoding the version number components. */ +#ifdef SIMULATE_VERSION_MAJOR +char const info_simulate_version[] = { + 'I', 'N', 'F', 'O', ':', + 's','i','m','u','l','a','t','e','_','v','e','r','s','i','o','n','[', + SIMULATE_VERSION_MAJOR, +# ifdef SIMULATE_VERSION_MINOR + '.', SIMULATE_VERSION_MINOR, +# ifdef SIMULATE_VERSION_PATCH + '.', SIMULATE_VERSION_PATCH, +# ifdef SIMULATE_VERSION_TWEAK + '.', SIMULATE_VERSION_TWEAK, +# endif +# endif +# endif + ']','\0'}; +#endif + +/* Construct the string literal in pieces to prevent the source from + getting matched. 
Store it in a pointer rather than an array + because some compilers will just produce instructions to fill the + array rather than assigning a pointer to a static array. */ +char const* info_platform = "INFO" ":" "platform[" PLATFORM_ID "]"; +char const* info_arch = "INFO" ":" "arch[" ARCHITECTURE_ID "]"; + + + + +#if !defined(__STDC__) +# if defined(_MSC_VER) && !defined(__clang__) +# define C_DIALECT "90" +# else +# define C_DIALECT +# endif +#elif __STDC_VERSION__ >= 201000L +# define C_DIALECT "11" +#elif __STDC_VERSION__ >= 199901L +# define C_DIALECT "99" +#else +# define C_DIALECT "90" +#endif +const char* info_language_dialect_default = + "INFO" ":" "dialect_default[" C_DIALECT "]"; + +/*--------------------------------------------------------------------------*/ + +#ifdef ID_VOID_MAIN +void main() {} +#else +# if defined(__CLASSIC_C__) +int main(argc, argv) int argc; char *argv[]; +# else +int main(int argc, char* argv[]) +# endif +{ + int require = 0; + require += info_compiler[argc]; + require += info_platform[argc]; + require += info_arch[argc]; +#ifdef COMPILER_VERSION_MAJOR + require += info_version[argc]; +#endif +#ifdef COMPILER_VERSION_INTERNAL + require += info_version_internal[argc]; +#endif +#ifdef SIMULATE_ID + require += info_simulate[argc]; +#endif +#ifdef SIMULATE_VERSION_MAJOR + require += info_simulate_version[argc]; +#endif +#if defined(__CRAYXE) || defined(__CRAYXC) + require += info_cray[argc]; +#endif + require += info_language_dialect_default[argc]; + (void)argv; + return require; +} +#endif diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CompilerIdC/a.out b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CompilerIdC/a.out new file mode 100644 index 0000000000000000000000000000000000000000..11b7df452ad29dfd7bf1d9b188856c9e33182403 Binary files 
/dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CompilerIdC/a.out differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CompilerIdCXX/CMakeCXXCompilerId.cpp b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CompilerIdCXX/CMakeCXXCompilerId.cpp new file mode 100644 index 0000000000000000000000000000000000000000..2d66298588989dc5d404dae0025b8bf4e952498e --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CompilerIdCXX/CMakeCXXCompilerId.cpp @@ -0,0 +1,576 @@ +/* This source file must have a .cpp extension so that all C++ compilers + recognize the extension without flags. Borland does not know .cxx for + example. */ +#ifndef __cplusplus +# error "A C compiler has been selected for C++." 
+#endif + + +/* Version number components: V=Version, R=Revision, P=Patch + Version date components: YYYY=Year, MM=Month, DD=Day */ + +#if defined(__COMO__) +# define COMPILER_ID "Comeau" + /* __COMO_VERSION__ = VRR */ +# define COMPILER_VERSION_MAJOR DEC(__COMO_VERSION__ / 100) +# define COMPILER_VERSION_MINOR DEC(__COMO_VERSION__ % 100) + +#elif defined(__INTEL_COMPILER) || defined(__ICC) +# define COMPILER_ID "Intel" +# if defined(_MSC_VER) +# define SIMULATE_ID "MSVC" +# endif + /* __INTEL_COMPILER = VRP */ +# define COMPILER_VERSION_MAJOR DEC(__INTEL_COMPILER/100) +# define COMPILER_VERSION_MINOR DEC(__INTEL_COMPILER/10 % 10) +# if defined(__INTEL_COMPILER_UPDATE) +# define COMPILER_VERSION_PATCH DEC(__INTEL_COMPILER_UPDATE) +# else +# define COMPILER_VERSION_PATCH DEC(__INTEL_COMPILER % 10) +# endif +# if defined(__INTEL_COMPILER_BUILD_DATE) + /* __INTEL_COMPILER_BUILD_DATE = YYYYMMDD */ +# define COMPILER_VERSION_TWEAK DEC(__INTEL_COMPILER_BUILD_DATE) +# endif +# if defined(_MSC_VER) + /* _MSC_VER = VVRR */ +# define SIMULATE_VERSION_MAJOR DEC(_MSC_VER / 100) +# define SIMULATE_VERSION_MINOR DEC(_MSC_VER % 100) +# endif + +#elif defined(__PATHCC__) +# define COMPILER_ID "PathScale" +# define COMPILER_VERSION_MAJOR DEC(__PATHCC__) +# define COMPILER_VERSION_MINOR DEC(__PATHCC_MINOR__) +# if defined(__PATHCC_PATCHLEVEL__) +# define COMPILER_VERSION_PATCH DEC(__PATHCC_PATCHLEVEL__) +# endif + +#elif defined(__BORLANDC__) && defined(__CODEGEARC_VERSION__) +# define COMPILER_ID "Embarcadero" +# define COMPILER_VERSION_MAJOR HEX(__CODEGEARC_VERSION__>>24 & 0x00FF) +# define COMPILER_VERSION_MINOR HEX(__CODEGEARC_VERSION__>>16 & 0x00FF) +# define COMPILER_VERSION_PATCH DEC(__CODEGEARC_VERSION__ & 0xFFFF) + +#elif defined(__BORLANDC__) +# define COMPILER_ID "Borland" + /* __BORLANDC__ = 0xVRR */ +# define COMPILER_VERSION_MAJOR HEX(__BORLANDC__>>8) +# define COMPILER_VERSION_MINOR HEX(__BORLANDC__ & 0xFF) + +#elif defined(__WATCOMC__) && __WATCOMC__ < 1200 +# define 
COMPILER_ID "Watcom" + /* __WATCOMC__ = VVRR */ +# define COMPILER_VERSION_MAJOR DEC(__WATCOMC__ / 100) +# define COMPILER_VERSION_MINOR DEC((__WATCOMC__ / 10) % 10) +# if (__WATCOMC__ % 10) > 0 +# define COMPILER_VERSION_PATCH DEC(__WATCOMC__ % 10) +# endif + +#elif defined(__WATCOMC__) +# define COMPILER_ID "OpenWatcom" + /* __WATCOMC__ = VVRP + 1100 */ +# define COMPILER_VERSION_MAJOR DEC((__WATCOMC__ - 1100) / 100) +# define COMPILER_VERSION_MINOR DEC((__WATCOMC__ / 10) % 10) +# if (__WATCOMC__ % 10) > 0 +# define COMPILER_VERSION_PATCH DEC(__WATCOMC__ % 10) +# endif + +#elif defined(__SUNPRO_CC) +# define COMPILER_ID "SunPro" +# if __SUNPRO_CC >= 0x5100 + /* __SUNPRO_CC = 0xVRRP */ +# define COMPILER_VERSION_MAJOR HEX(__SUNPRO_CC>>12) +# define COMPILER_VERSION_MINOR HEX(__SUNPRO_CC>>4 & 0xFF) +# define COMPILER_VERSION_PATCH HEX(__SUNPRO_CC & 0xF) +# else + /* __SUNPRO_CC = 0xVRP */ +# define COMPILER_VERSION_MAJOR HEX(__SUNPRO_CC>>8) +# define COMPILER_VERSION_MINOR HEX(__SUNPRO_CC>>4 & 0xF) +# define COMPILER_VERSION_PATCH HEX(__SUNPRO_CC & 0xF) +# endif + +#elif defined(__HP_aCC) +# define COMPILER_ID "HP" + /* __HP_aCC = VVRRPP */ +# define COMPILER_VERSION_MAJOR DEC(__HP_aCC/10000) +# define COMPILER_VERSION_MINOR DEC(__HP_aCC/100 % 100) +# define COMPILER_VERSION_PATCH DEC(__HP_aCC % 100) + +#elif defined(__DECCXX) +# define COMPILER_ID "Compaq" + /* __DECCXX_VER = VVRRTPPPP */ +# define COMPILER_VERSION_MAJOR DEC(__DECCXX_VER/10000000) +# define COMPILER_VERSION_MINOR DEC(__DECCXX_VER/100000 % 100) +# define COMPILER_VERSION_PATCH DEC(__DECCXX_VER % 10000) + +#elif defined(__IBMCPP__) && defined(__COMPILER_VER__) +# define COMPILER_ID "zOS" + /* __IBMCPP__ = VRP */ +# define COMPILER_VERSION_MAJOR DEC(__IBMCPP__/100) +# define COMPILER_VERSION_MINOR DEC(__IBMCPP__/10 % 10) +# define COMPILER_VERSION_PATCH DEC(__IBMCPP__ % 10) + +#elif defined(__IBMCPP__) && !defined(__COMPILER_VER__) && __IBMCPP__ >= 800 +# define COMPILER_ID "XL" + /* __IBMCPP__ = VRP 
*/ +# define COMPILER_VERSION_MAJOR DEC(__IBMCPP__/100) +# define COMPILER_VERSION_MINOR DEC(__IBMCPP__/10 % 10) +# define COMPILER_VERSION_PATCH DEC(__IBMCPP__ % 10) + +#elif defined(__IBMCPP__) && !defined(__COMPILER_VER__) && __IBMCPP__ < 800 +# define COMPILER_ID "VisualAge" + /* __IBMCPP__ = VRP */ +# define COMPILER_VERSION_MAJOR DEC(__IBMCPP__/100) +# define COMPILER_VERSION_MINOR DEC(__IBMCPP__/10 % 10) +# define COMPILER_VERSION_PATCH DEC(__IBMCPP__ % 10) + +#elif defined(__PGI) +# define COMPILER_ID "PGI" +# define COMPILER_VERSION_MAJOR DEC(__PGIC__) +# define COMPILER_VERSION_MINOR DEC(__PGIC_MINOR__) +# if defined(__PGIC_PATCHLEVEL__) +# define COMPILER_VERSION_PATCH DEC(__PGIC_PATCHLEVEL__) +# endif + +#elif defined(_CRAYC) +# define COMPILER_ID "Cray" +# define COMPILER_VERSION_MAJOR DEC(_RELEASE_MAJOR) +# define COMPILER_VERSION_MINOR DEC(_RELEASE_MINOR) + +#elif defined(__TI_COMPILER_VERSION__) +# define COMPILER_ID "TI" + /* __TI_COMPILER_VERSION__ = VVVRRRPPP */ +# define COMPILER_VERSION_MAJOR DEC(__TI_COMPILER_VERSION__/1000000) +# define COMPILER_VERSION_MINOR DEC(__TI_COMPILER_VERSION__/1000 % 1000) +# define COMPILER_VERSION_PATCH DEC(__TI_COMPILER_VERSION__ % 1000) + +#elif defined(__FUJITSU) || defined(__FCC_VERSION) || defined(__fcc_version) +# define COMPILER_ID "Fujitsu" + +#elif defined(__SCO_VERSION__) +# define COMPILER_ID "SCO" + +#elif defined(__clang__) && defined(__apple_build_version__) +# define COMPILER_ID "AppleClang" +# if defined(_MSC_VER) +# define SIMULATE_ID "MSVC" +# endif +# define COMPILER_VERSION_MAJOR DEC(__clang_major__) +# define COMPILER_VERSION_MINOR DEC(__clang_minor__) +# define COMPILER_VERSION_PATCH DEC(__clang_patchlevel__) +# if defined(_MSC_VER) + /* _MSC_VER = VVRR */ +# define SIMULATE_VERSION_MAJOR DEC(_MSC_VER / 100) +# define SIMULATE_VERSION_MINOR DEC(_MSC_VER % 100) +# endif +# define COMPILER_VERSION_TWEAK DEC(__apple_build_version__) + +#elif defined(__clang__) +# define COMPILER_ID "Clang" +# if 
defined(_MSC_VER) +# define SIMULATE_ID "MSVC" +# endif +# define COMPILER_VERSION_MAJOR DEC(__clang_major__) +# define COMPILER_VERSION_MINOR DEC(__clang_minor__) +# define COMPILER_VERSION_PATCH DEC(__clang_patchlevel__) +# if defined(_MSC_VER) + /* _MSC_VER = VVRR */ +# define SIMULATE_VERSION_MAJOR DEC(_MSC_VER / 100) +# define SIMULATE_VERSION_MINOR DEC(_MSC_VER % 100) +# endif + +#elif defined(__GNUC__) || defined(__GNUG__) +# define COMPILER_ID "GNU" +# if defined(__GNUC__) +# define COMPILER_VERSION_MAJOR DEC(__GNUC__) +# else +# define COMPILER_VERSION_MAJOR DEC(__GNUG__) +# endif +# if defined(__GNUC_MINOR__) +# define COMPILER_VERSION_MINOR DEC(__GNUC_MINOR__) +# endif +# if defined(__GNUC_PATCHLEVEL__) +# define COMPILER_VERSION_PATCH DEC(__GNUC_PATCHLEVEL__) +# endif + +#elif defined(_MSC_VER) +# define COMPILER_ID "MSVC" + /* _MSC_VER = VVRR */ +# define COMPILER_VERSION_MAJOR DEC(_MSC_VER / 100) +# define COMPILER_VERSION_MINOR DEC(_MSC_VER % 100) +# if defined(_MSC_FULL_VER) +# if _MSC_VER >= 1400 + /* _MSC_FULL_VER = VVRRPPPPP */ +# define COMPILER_VERSION_PATCH DEC(_MSC_FULL_VER % 100000) +# else + /* _MSC_FULL_VER = VVRRPPPP */ +# define COMPILER_VERSION_PATCH DEC(_MSC_FULL_VER % 10000) +# endif +# endif +# if defined(_MSC_BUILD) +# define COMPILER_VERSION_TWEAK DEC(_MSC_BUILD) +# endif + +#elif defined(__VISUALDSPVERSION__) || defined(__ADSPBLACKFIN__) || defined(__ADSPTS__) || defined(__ADSP21000__) +# define COMPILER_ID "ADSP" +#if defined(__VISUALDSPVERSION__) + /* __VISUALDSPVERSION__ = 0xVVRRPP00 */ +# define COMPILER_VERSION_MAJOR HEX(__VISUALDSPVERSION__>>24) +# define COMPILER_VERSION_MINOR HEX(__VISUALDSPVERSION__>>16 & 0xFF) +# define COMPILER_VERSION_PATCH HEX(__VISUALDSPVERSION__>>8 & 0xFF) +#endif + +#elif defined(__IAR_SYSTEMS_ICC__) || defined(__IAR_SYSTEMS_ICC) +# define COMPILER_ID "IAR" +# if defined(__VER__) +# define COMPILER_VERSION_MAJOR DEC((__VER__) / 1000000) +# define COMPILER_VERSION_MINOR DEC(((__VER__) / 1000) % 
1000) +# define COMPILER_VERSION_PATCH DEC((__VER__) % 1000) +# define COMPILER_VERSION_INTERNAL DEC(__IAR_SYSTEMS_ICC__) +# endif + +#elif defined(__ARMCC_VERSION) +# define COMPILER_ID "ARMCC" +#if __ARMCC_VERSION >= 1000000 + /* __ARMCC_VERSION = VRRPPPP */ + # define COMPILER_VERSION_MAJOR DEC(__ARMCC_VERSION/1000000) + # define COMPILER_VERSION_MINOR DEC(__ARMCC_VERSION/10000 % 100) + # define COMPILER_VERSION_PATCH DEC(__ARMCC_VERSION % 10000) +#else + /* __ARMCC_VERSION = VRPPPP */ + # define COMPILER_VERSION_MAJOR DEC(__ARMCC_VERSION/100000) + # define COMPILER_VERSION_MINOR DEC(__ARMCC_VERSION/10000 % 10) + # define COMPILER_VERSION_PATCH DEC(__ARMCC_VERSION % 10000) +#endif + + +#elif defined(_SGI_COMPILER_VERSION) || defined(_COMPILER_VERSION) +# define COMPILER_ID "MIPSpro" +# if defined(_SGI_COMPILER_VERSION) + /* _SGI_COMPILER_VERSION = VRP */ +# define COMPILER_VERSION_MAJOR DEC(_SGI_COMPILER_VERSION/100) +# define COMPILER_VERSION_MINOR DEC(_SGI_COMPILER_VERSION/10 % 10) +# define COMPILER_VERSION_PATCH DEC(_SGI_COMPILER_VERSION % 10) +# else + /* _COMPILER_VERSION = VRP */ +# define COMPILER_VERSION_MAJOR DEC(_COMPILER_VERSION/100) +# define COMPILER_VERSION_MINOR DEC(_COMPILER_VERSION/10 % 10) +# define COMPILER_VERSION_PATCH DEC(_COMPILER_VERSION % 10) +# endif + + +/* These compilers are either not known or too old to define an + identification macro. Try to identify the platform and guess that + it is the native compiler. */ +#elif defined(__sgi) +# define COMPILER_ID "MIPSpro" + +#elif defined(__hpux) || defined(__hpua) +# define COMPILER_ID "HP" + +#else /* unknown compiler */ +# define COMPILER_ID "" +#endif + +/* Construct the string literal in pieces to prevent the source from + getting matched. Store it in a pointer rather than an array + because some compilers will just produce instructions to fill the + array rather than assigning a pointer to a static array. 
*/ +char const* info_compiler = "INFO" ":" "compiler[" COMPILER_ID "]"; +#ifdef SIMULATE_ID +char const* info_simulate = "INFO" ":" "simulate[" SIMULATE_ID "]"; +#endif + +#ifdef __QNXNTO__ +char const* qnxnto = "INFO" ":" "qnxnto[]"; +#endif + +#if defined(__CRAYXE) || defined(__CRAYXC) +char const *info_cray = "INFO" ":" "compiler_wrapper[CrayPrgEnv]"; +#endif + +#define STRINGIFY_HELPER(X) #X +#define STRINGIFY(X) STRINGIFY_HELPER(X) + +/* Identify known platforms by name. */ +#if defined(__linux) || defined(__linux__) || defined(linux) +# define PLATFORM_ID "Linux" + +#elif defined(__CYGWIN__) +# define PLATFORM_ID "Cygwin" + +#elif defined(__MINGW32__) +# define PLATFORM_ID "MinGW" + +#elif defined(__APPLE__) +# define PLATFORM_ID "Darwin" + +#elif defined(_WIN32) || defined(__WIN32__) || defined(WIN32) +# define PLATFORM_ID "Windows" + +#elif defined(__FreeBSD__) || defined(__FreeBSD) +# define PLATFORM_ID "FreeBSD" + +#elif defined(__NetBSD__) || defined(__NetBSD) +# define PLATFORM_ID "NetBSD" + +#elif defined(__OpenBSD__) || defined(__OPENBSD) +# define PLATFORM_ID "OpenBSD" + +#elif defined(__sun) || defined(sun) +# define PLATFORM_ID "SunOS" + +#elif defined(_AIX) || defined(__AIX) || defined(__AIX__) || defined(__aix) || defined(__aix__) +# define PLATFORM_ID "AIX" + +#elif defined(__sgi) || defined(__sgi__) || defined(_SGI) +# define PLATFORM_ID "IRIX" + +#elif defined(__hpux) || defined(__hpux__) +# define PLATFORM_ID "HP-UX" + +#elif defined(__HAIKU__) +# define PLATFORM_ID "Haiku" + +#elif defined(__BeOS) || defined(__BEOS__) || defined(_BEOS) +# define PLATFORM_ID "BeOS" + +#elif defined(__QNX__) || defined(__QNXNTO__) +# define PLATFORM_ID "QNX" + +#elif defined(__tru64) || defined(_tru64) || defined(__TRU64__) +# define PLATFORM_ID "Tru64" + +#elif defined(__riscos) || defined(__riscos__) +# define PLATFORM_ID "RISCos" + +#elif defined(__sinix) || defined(__sinix__) || defined(__SINIX__) +# define PLATFORM_ID "SINIX" + +#elif defined(__UNIX_SV__) 
+# define PLATFORM_ID "UNIX_SV" + +#elif defined(__bsdos__) +# define PLATFORM_ID "BSDOS" + +#elif defined(_MPRAS) || defined(MPRAS) +# define PLATFORM_ID "MP-RAS" + +#elif defined(__osf) || defined(__osf__) +# define PLATFORM_ID "OSF1" + +#elif defined(_SCO_SV) || defined(SCO_SV) || defined(sco_sv) +# define PLATFORM_ID "SCO_SV" + +#elif defined(__ultrix) || defined(__ultrix__) || defined(_ULTRIX) +# define PLATFORM_ID "ULTRIX" + +#elif defined(__XENIX__) || defined(_XENIX) || defined(XENIX) +# define PLATFORM_ID "Xenix" + +#elif defined(__WATCOMC__) +# if defined(__LINUX__) +# define PLATFORM_ID "Linux" + +# elif defined(__DOS__) +# define PLATFORM_ID "DOS" + +# elif defined(__OS2__) +# define PLATFORM_ID "OS2" + +# elif defined(__WINDOWS__) +# define PLATFORM_ID "Windows3x" + +# else /* unknown platform */ +# define PLATFORM_ID +# endif + +#else /* unknown platform */ +# define PLATFORM_ID + +#endif + +/* For windows compilers MSVC and Intel we can determine + the architecture of the compiler being used. 
This is because + the compilers do not have flags that can change the architecture, + but rather depend on which compiler is being used +*/ +#if defined(_WIN32) && defined(_MSC_VER) +# if defined(_M_IA64) +# define ARCHITECTURE_ID "IA64" + +# elif defined(_M_X64) || defined(_M_AMD64) +# define ARCHITECTURE_ID "x64" + +# elif defined(_M_IX86) +# define ARCHITECTURE_ID "X86" + +# elif defined(_M_ARM64) +# define ARCHITECTURE_ID "ARM64" + +# elif defined(_M_ARM) +# if _M_ARM == 4 +# define ARCHITECTURE_ID "ARMV4I" +# elif _M_ARM == 5 +# define ARCHITECTURE_ID "ARMV5I" +# else +# define ARCHITECTURE_ID "ARMV" STRINGIFY(_M_ARM) +# endif + +# elif defined(_M_MIPS) +# define ARCHITECTURE_ID "MIPS" + +# elif defined(_M_SH) +# define ARCHITECTURE_ID "SHx" + +# else /* unknown architecture */ +# define ARCHITECTURE_ID "" +# endif + +#elif defined(__WATCOMC__) +# if defined(_M_I86) +# define ARCHITECTURE_ID "I86" + +# elif defined(_M_IX86) +# define ARCHITECTURE_ID "X86" + +# else /* unknown architecture */ +# define ARCHITECTURE_ID "" +# endif + +#elif defined(__IAR_SYSTEMS_ICC__) || defined(__IAR_SYSTEMS_ICC) +# if defined(__ICCARM__) +# define ARCHITECTURE_ID "ARM" + +# elif defined(__ICCAVR__) +# define ARCHITECTURE_ID "AVR" + +# else /* unknown architecture */ +# define ARCHITECTURE_ID "" +# endif +#else +# define ARCHITECTURE_ID +#endif + +/* Convert integer to decimal digit literals. */ +#define DEC(n) \ + ('0' + (((n) / 10000000)%10)), \ + ('0' + (((n) / 1000000)%10)), \ + ('0' + (((n) / 100000)%10)), \ + ('0' + (((n) / 10000)%10)), \ + ('0' + (((n) / 1000)%10)), \ + ('0' + (((n) / 100)%10)), \ + ('0' + (((n) / 10)%10)), \ + ('0' + ((n) % 10)) + +/* Convert integer to hex digit literals. 
*/ +#define HEX(n) \ + ('0' + ((n)>>28 & 0xF)), \ + ('0' + ((n)>>24 & 0xF)), \ + ('0' + ((n)>>20 & 0xF)), \ + ('0' + ((n)>>16 & 0xF)), \ + ('0' + ((n)>>12 & 0xF)), \ + ('0' + ((n)>>8 & 0xF)), \ + ('0' + ((n)>>4 & 0xF)), \ + ('0' + ((n) & 0xF)) + +/* Construct a string literal encoding the version number components. */ +#ifdef COMPILER_VERSION_MAJOR +char const info_version[] = { + 'I', 'N', 'F', 'O', ':', + 'c','o','m','p','i','l','e','r','_','v','e','r','s','i','o','n','[', + COMPILER_VERSION_MAJOR, +# ifdef COMPILER_VERSION_MINOR + '.', COMPILER_VERSION_MINOR, +# ifdef COMPILER_VERSION_PATCH + '.', COMPILER_VERSION_PATCH, +# ifdef COMPILER_VERSION_TWEAK + '.', COMPILER_VERSION_TWEAK, +# endif +# endif +# endif + ']','\0'}; +#endif + +/* Construct a string literal encoding the internal version number. */ +#ifdef COMPILER_VERSION_INTERNAL +char const info_version_internal[] = { + 'I', 'N', 'F', 'O', ':', + 'c','o','m','p','i','l','e','r','_','v','e','r','s','i','o','n','_', + 'i','n','t','e','r','n','a','l','[', + COMPILER_VERSION_INTERNAL,']','\0'}; +#endif + +/* Construct a string literal encoding the version number components. */ +#ifdef SIMULATE_VERSION_MAJOR +char const info_simulate_version[] = { + 'I', 'N', 'F', 'O', ':', + 's','i','m','u','l','a','t','e','_','v','e','r','s','i','o','n','[', + SIMULATE_VERSION_MAJOR, +# ifdef SIMULATE_VERSION_MINOR + '.', SIMULATE_VERSION_MINOR, +# ifdef SIMULATE_VERSION_PATCH + '.', SIMULATE_VERSION_PATCH, +# ifdef SIMULATE_VERSION_TWEAK + '.', SIMULATE_VERSION_TWEAK, +# endif +# endif +# endif + ']','\0'}; +#endif + +/* Construct the string literal in pieces to prevent the source from + getting matched. Store it in a pointer rather than an array + because some compilers will just produce instructions to fill the + array rather than assigning a pointer to a static array. 
*/ +char const* info_platform = "INFO" ":" "platform[" PLATFORM_ID "]"; +char const* info_arch = "INFO" ":" "arch[" ARCHITECTURE_ID "]"; + + + + +#if defined(_MSC_VER) && defined(_MSVC_LANG) +#define CXX_STD _MSVC_LANG +#else +#define CXX_STD __cplusplus +#endif + +const char* info_language_dialect_default = "INFO" ":" "dialect_default[" +#if CXX_STD > 201402L + "17" +#elif CXX_STD >= 201402L + "14" +#elif CXX_STD >= 201103L + "11" +#else + "98" +#endif +"]"; + +/*--------------------------------------------------------------------------*/ + +int main(int argc, char* argv[]) +{ + int require = 0; + require += info_compiler[argc]; + require += info_platform[argc]; +#ifdef COMPILER_VERSION_MAJOR + require += info_version[argc]; +#endif +#ifdef COMPILER_VERSION_INTERNAL + require += info_version_internal[argc]; +#endif +#ifdef SIMULATE_ID + require += info_simulate[argc]; +#endif +#ifdef SIMULATE_VERSION_MAJOR + require += info_simulate_version[argc]; +#endif +#if defined(__CRAYXE) || defined(__CRAYXC) + require += info_cray[argc]; +#endif + require += info_language_dialect_default[argc]; + (void)argv; + return require; +} diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CompilerIdCXX/a.out b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CompilerIdCXX/a.out new file mode 100644 index 0000000000000000000000000000000000000000..764ad62eb6284915a1c822b46cb3dffbd27d5143 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CompilerIdCXX/a.out differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeDirectoryInformation.cmake 
b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeDirectoryInformation.cmake new file mode 100644 index 0000000000000000000000000000000000000000..db5b86ccb020230f38d87958387d3539ec1cd497 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeDirectoryInformation.cmake @@ -0,0 +1,16 @@ +# CMAKE generated file: DO NOT EDIT! +# Generated by "Unix Makefiles" Generator, CMake Version 3.10 + +# Relative path conversion top directories. +set(CMAKE_RELATIVE_PATH_TOP_SOURCE "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src") +set(CMAKE_RELATIVE_PATH_TOP_BINARY "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host") + +# Force unix paths in dependencies. +set(CMAKE_FORCE_UNIX_PATHS 1) + + +# The C and CXX include file regular expressions for this directory. 
+set(CMAKE_C_INCLUDE_REGEX_SCAN "^.*$") +set(CMAKE_C_INCLUDE_REGEX_COMPLAIN "^$") +set(CMAKE_CXX_INCLUDE_REGEX_SCAN ${CMAKE_C_INCLUDE_REGEX_SCAN}) +set(CMAKE_CXX_INCLUDE_REGEX_COMPLAIN ${CMAKE_C_INCLUDE_REGEX_COMPLAIN}) diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeOutput.log b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeOutput.log new file mode 100644 index 0000000000000000000000000000000000000000..f0641cf5c7f27aea2d96dc664cd1a5e71d4d009d --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeOutput.log @@ -0,0 +1,628 @@ +The system is: Linux - 5.4.0-65-generic - x86_64 +Compiling the C compiler identification source file "CMakeCCompilerId.c" succeeded. +Compiler: /usr/bin/cc +Build flags: +Id flags: + +The output was: +0 + + +Compilation of the C compiler identification source "CMakeCCompilerId.c" produced "a.out" + +The C compiler identification is GNU, found in "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CompilerIdC/a.out" + +Compiling the CXX compiler identification source file "CMakeCXXCompilerId.cpp" succeeded. 
+Compiler: /usr/bin/aarch64-linux-gnu-g++ +Build flags: +Id flags: + +The output was: +0 + + +Compilation of the CXX compiler identification source "CMakeCXXCompilerId.cpp" produced "a.out" + +The CXX compiler identification is GNU, found in "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/3.10.2/CompilerIdCXX/a.out" + +Determining if the C compiler works passed with the following output: +Change Dir: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp + +Run Build Command:"/usr/bin/make" "cmTC_c0394/fast" +/usr/bin/make -f CMakeFiles/cmTC_c0394.dir/build.make CMakeFiles/cmTC_c0394.dir/build +make[1]: Entering directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' +Building C object CMakeFiles/cmTC_c0394.dir/testCCompiler.c.o +/usr/bin/cc -o CMakeFiles/cmTC_c0394.dir/testCCompiler.c.o -c /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp/testCCompiler.c +Linking C executable cmTC_c0394 +/usr/bin/cmake -E cmake_link_script CMakeFiles/cmTC_c0394.dir/link.txt --verbose=1 +/usr/bin/cc CMakeFiles/cmTC_c0394.dir/testCCompiler.c.o -o cmTC_c0394 +make[1]: Leaving directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' + + +Detecting C compiler ABI info compiled with the following output: +Change Dir: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp + +Run Build Command:"/usr/bin/make" 
"cmTC_55368/fast" +/usr/bin/make -f CMakeFiles/cmTC_55368.dir/build.make CMakeFiles/cmTC_55368.dir/build +make[1]: Entering directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' +Building C object CMakeFiles/cmTC_55368.dir/CMakeCCompilerABI.c.o +/usr/bin/cc -o CMakeFiles/cmTC_55368.dir/CMakeCCompilerABI.c.o -c /usr/share/cmake-3.10/Modules/CMakeCCompilerABI.c +Linking C executable cmTC_55368 +/usr/bin/cmake -E cmake_link_script CMakeFiles/cmTC_55368.dir/link.txt --verbose=1 +/usr/bin/cc -v CMakeFiles/cmTC_55368.dir/CMakeCCompilerABI.c.o -o cmTC_55368 +Using built-in specs. +COLLECT_GCC=/usr/bin/cc +COLLECT_LTO_WRAPPER=/usr/lib/gcc/x86_64-linux-gnu/7/lto-wrapper +OFFLOAD_TARGET_NAMES=nvptx-none +OFFLOAD_TARGET_DEFAULT=1 +Target: x86_64-linux-gnu +Configured with: ../src/configure -v --with-pkgversion='Ubuntu 7.5.0-3ubuntu1~18.04' --with-bugurl=file:///usr/share/doc/gcc-7/README.Bugs --enable-languages=c,ada,c++,go,brig,d,fortran,objc,obj-c++ --prefix=/usr --with-gcc-major-version-only --program-suffix=-7 --program-prefix=x86_64-linux-gnu- --enable-shared --enable-linker-build-id --libexecdir=/usr/lib --without-included-gettext --enable-threads=posix --libdir=/usr/lib --enable-nls --enable-bootstrap --enable-clocale=gnu --enable-libstdcxx-debug --enable-libstdcxx-time=yes --with-default-libstdcxx-abi=new --enable-gnu-unique-object --disable-vtable-verify --enable-libmpx --enable-plugin --enable-default-pie --with-system-zlib --with-target-system-zlib --enable-objc-gc=auto --enable-multiarch --disable-werror --with-arch-32=i686 --with-abi=m64 --with-multilib-list=m32,m64,mx32 --enable-multilib --with-tune=generic --enable-offload-targets=nvptx-none --without-cuda-driver --enable-checking=release --build=x86_64-linux-gnu --host=x86_64-linux-gnu --target=x86_64-linux-gnu +Thread model: posix +gcc version 7.5.0 (Ubuntu 7.5.0-3ubuntu1~18.04) 
+COMPILER_PATH=/usr/lib/gcc/x86_64-linux-gnu/7/:/usr/lib/gcc/x86_64-linux-gnu/7/:/usr/lib/gcc/x86_64-linux-gnu/:/usr/lib/gcc/x86_64-linux-gnu/7/:/usr/lib/gcc/x86_64-linux-gnu/ +LIBRARY_PATH=/usr/lib/gcc/x86_64-linux-gnu/7/:/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/:/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib/:/lib/x86_64-linux-gnu/:/lib/../lib/:/usr/lib/x86_64-linux-gnu/:/usr/lib/../lib/:/usr/lib/gcc/x86_64-linux-gnu/7/../../../:/lib/:/usr/lib/ +COLLECT_GCC_OPTIONS='-v' '-o' 'cmTC_55368' '-mtune=generic' '-march=x86-64' + /usr/lib/gcc/x86_64-linux-gnu/7/collect2 -plugin /usr/lib/gcc/x86_64-linux-gnu/7/liblto_plugin.so -plugin-opt=/usr/lib/gcc/x86_64-linux-gnu/7/lto-wrapper -plugin-opt=-fresolution=/tmp/ccKGDrJp.res -plugin-opt=-pass-through=-lgcc -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lc -plugin-opt=-pass-through=-lgcc -plugin-opt=-pass-through=-lgcc_s --build-id --eh-frame-hdr -m elf_x86_64 --hash-style=gnu --as-needed -dynamic-linker /lib64/ld-linux-x86-64.so.2 -pie -z now -z relro -o cmTC_55368 /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/Scrt1.o /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/crti.o /usr/lib/gcc/x86_64-linux-gnu/7/crtbeginS.o -L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.. 
CMakeFiles/cmTC_55368.dir/CMakeCCompilerABI.c.o -lgcc --push-state --as-needed -lgcc_s --pop-state -lc -lgcc --push-state --as-needed -lgcc_s --pop-state /usr/lib/gcc/x86_64-linux-gnu/7/crtendS.o /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/crtn.o +COLLECT_GCC_OPTIONS='-v' '-o' 'cmTC_55368' '-mtune=generic' '-march=x86-64' +make[1]: Leaving directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' + + +Parsed C implicit link information from above output: + link line regex: [^( *|.*[/\])(ld|CMAKE_LINK_STARTFILE-NOTFOUND|([^/\]+-)?ld|collect2)[^/\]*( |$)] + ignore line: [Change Dir: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp] + ignore line: [] + ignore line: [Run Build Command:"/usr/bin/make" "cmTC_55368/fast"] + ignore line: [/usr/bin/make -f CMakeFiles/cmTC_55368.dir/build.make CMakeFiles/cmTC_55368.dir/build] + ignore line: [make[1]: Entering directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp'] + ignore line: [Building C object CMakeFiles/cmTC_55368.dir/CMakeCCompilerABI.c.o] + ignore line: [/usr/bin/cc -o CMakeFiles/cmTC_55368.dir/CMakeCCompilerABI.c.o -c /usr/share/cmake-3.10/Modules/CMakeCCompilerABI.c] + ignore line: [Linking C executable cmTC_55368] + ignore line: [/usr/bin/cmake -E cmake_link_script CMakeFiles/cmTC_55368.dir/link.txt --verbose=1] + ignore line: [/usr/bin/cc -v CMakeFiles/cmTC_55368.dir/CMakeCCompilerABI.c.o -o cmTC_55368 ] + ignore line: [Using built-in specs.] 
+ ignore line: [COLLECT_GCC=/usr/bin/cc] + ignore line: [COLLECT_LTO_WRAPPER=/usr/lib/gcc/x86_64-linux-gnu/7/lto-wrapper] + ignore line: [OFFLOAD_TARGET_NAMES=nvptx-none] + ignore line: [OFFLOAD_TARGET_DEFAULT=1] + ignore line: [Target: x86_64-linux-gnu] + ignore line: [Configured with: ../src/configure -v --with-pkgversion='Ubuntu 7.5.0-3ubuntu1~18.04' --with-bugurl=file:///usr/share/doc/gcc-7/README.Bugs --enable-languages=c,ada,c++,go,brig,d,fortran,objc,obj-c++ --prefix=/usr --with-gcc-major-version-only --program-suffix=-7 --program-prefix=x86_64-linux-gnu- --enable-shared --enable-linker-build-id --libexecdir=/usr/lib --without-included-gettext --enable-threads=posix --libdir=/usr/lib --enable-nls --enable-bootstrap --enable-clocale=gnu --enable-libstdcxx-debug --enable-libstdcxx-time=yes --with-default-libstdcxx-abi=new --enable-gnu-unique-object --disable-vtable-verify --enable-libmpx --enable-plugin --enable-default-pie --with-system-zlib --with-target-system-zlib --enable-objc-gc=auto --enable-multiarch --disable-werror --with-arch-32=i686 --with-abi=m64 --with-multilib-list=m32,m64,mx32 --enable-multilib --with-tune=generic --enable-offload-targets=nvptx-none --without-cuda-driver --enable-checking=release --build=x86_64-linux-gnu --host=x86_64-linux-gnu --target=x86_64-linux-gnu] + ignore line: [Thread model: posix] + ignore line: [gcc version 7.5.0 (Ubuntu 7.5.0-3ubuntu1~18.04) ] + ignore line: [COMPILER_PATH=/usr/lib/gcc/x86_64-linux-gnu/7/:/usr/lib/gcc/x86_64-linux-gnu/7/:/usr/lib/gcc/x86_64-linux-gnu/:/usr/lib/gcc/x86_64-linux-gnu/7/:/usr/lib/gcc/x86_64-linux-gnu/] + ignore line: [LIBRARY_PATH=/usr/lib/gcc/x86_64-linux-gnu/7/:/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/:/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib/:/lib/x86_64-linux-gnu/:/lib/../lib/:/usr/lib/x86_64-linux-gnu/:/usr/lib/../lib/:/usr/lib/gcc/x86_64-linux-gnu/7/../../../:/lib/:/usr/lib/] + ignore line: [COLLECT_GCC_OPTIONS='-v' '-o' 'cmTC_55368' '-mtune=generic' 
'-march=x86-64'] + link line: [ /usr/lib/gcc/x86_64-linux-gnu/7/collect2 -plugin /usr/lib/gcc/x86_64-linux-gnu/7/liblto_plugin.so -plugin-opt=/usr/lib/gcc/x86_64-linux-gnu/7/lto-wrapper -plugin-opt=-fresolution=/tmp/ccKGDrJp.res -plugin-opt=-pass-through=-lgcc -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lc -plugin-opt=-pass-through=-lgcc -plugin-opt=-pass-through=-lgcc_s --build-id --eh-frame-hdr -m elf_x86_64 --hash-style=gnu --as-needed -dynamic-linker /lib64/ld-linux-x86-64.so.2 -pie -z now -z relro -o cmTC_55368 /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/Scrt1.o /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/crti.o /usr/lib/gcc/x86_64-linux-gnu/7/crtbeginS.o -L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.. CMakeFiles/cmTC_55368.dir/CMakeCCompilerABI.c.o -lgcc --push-state --as-needed -lgcc_s --pop-state -lc -lgcc --push-state --as-needed -lgcc_s --pop-state /usr/lib/gcc/x86_64-linux-gnu/7/crtendS.o /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/crtn.o] + arg [/usr/lib/gcc/x86_64-linux-gnu/7/collect2] ==> ignore + arg [-plugin] ==> ignore + arg [/usr/lib/gcc/x86_64-linux-gnu/7/liblto_plugin.so] ==> ignore + arg [-plugin-opt=/usr/lib/gcc/x86_64-linux-gnu/7/lto-wrapper] ==> ignore + arg [-plugin-opt=-fresolution=/tmp/ccKGDrJp.res] ==> ignore + arg [-plugin-opt=-pass-through=-lgcc] ==> ignore + arg [-plugin-opt=-pass-through=-lgcc_s] ==> ignore + arg [-plugin-opt=-pass-through=-lc] ==> ignore + arg [-plugin-opt=-pass-through=-lgcc] ==> ignore + arg [-plugin-opt=-pass-through=-lgcc_s] ==> ignore + arg [--build-id] ==> ignore + arg [--eh-frame-hdr] ==> ignore + arg [-m] ==> ignore + arg [elf_x86_64] ==> ignore + arg [--hash-style=gnu] ==> ignore + arg [--as-needed] ==> ignore + arg 
[-dynamic-linker] ==> ignore + arg [/lib64/ld-linux-x86-64.so.2] ==> ignore + arg [-pie] ==> ignore + arg [-znow] ==> ignore + arg [-zrelro] ==> ignore + arg [-o] ==> ignore + arg [cmTC_55368] ==> ignore + arg [/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/Scrt1.o] ==> ignore + arg [/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/crti.o] ==> ignore + arg [/usr/lib/gcc/x86_64-linux-gnu/7/crtbeginS.o] ==> ignore + arg [-L/usr/lib/gcc/x86_64-linux-gnu/7] ==> dir [/usr/lib/gcc/x86_64-linux-gnu/7] + arg [-L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu] ==> dir [/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu] + arg [-L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib] ==> dir [/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib] + arg [-L/lib/x86_64-linux-gnu] ==> dir [/lib/x86_64-linux-gnu] + arg [-L/lib/../lib] ==> dir [/lib/../lib] + arg [-L/usr/lib/x86_64-linux-gnu] ==> dir [/usr/lib/x86_64-linux-gnu] + arg [-L/usr/lib/../lib] ==> dir [/usr/lib/../lib] + arg [-L/usr/lib/gcc/x86_64-linux-gnu/7/../../..] ==> dir [/usr/lib/gcc/x86_64-linux-gnu/7/../../..] 
+ arg [CMakeFiles/cmTC_55368.dir/CMakeCCompilerABI.c.o] ==> ignore + arg [-lgcc] ==> lib [gcc] + arg [--push-state] ==> ignore + arg [--as-needed] ==> ignore + arg [-lgcc_s] ==> lib [gcc_s] + arg [--pop-state] ==> ignore + arg [-lc] ==> lib [c] + arg [-lgcc] ==> lib [gcc] + arg [--push-state] ==> ignore + arg [--as-needed] ==> ignore + arg [-lgcc_s] ==> lib [gcc_s] + arg [--pop-state] ==> ignore + arg [/usr/lib/gcc/x86_64-linux-gnu/7/crtendS.o] ==> ignore + arg [/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/crtn.o] ==> ignore + collapse library dir [/usr/lib/gcc/x86_64-linux-gnu/7] ==> [/usr/lib/gcc/x86_64-linux-gnu/7] + collapse library dir [/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu] ==> [/usr/lib/x86_64-linux-gnu] + collapse library dir [/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib] ==> [/usr/lib] + collapse library dir [/lib/x86_64-linux-gnu] ==> [/lib/x86_64-linux-gnu] + collapse library dir [/lib/../lib] ==> [/lib] + collapse library dir [/usr/lib/x86_64-linux-gnu] ==> [/usr/lib/x86_64-linux-gnu] + collapse library dir [/usr/lib/../lib] ==> [/usr/lib] + collapse library dir [/usr/lib/gcc/x86_64-linux-gnu/7/../../..] 
==> [/usr/lib] + implicit libs: [gcc;gcc_s;c;gcc;gcc_s] + implicit dirs: [/usr/lib/gcc/x86_64-linux-gnu/7;/usr/lib/x86_64-linux-gnu;/usr/lib;/lib/x86_64-linux-gnu;/lib] + implicit fwks: [] + + + + +Detecting C [-std=c11] compiler features compiled with the following output: +Change Dir: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp + +Run Build Command:"/usr/bin/make" "cmTC_759e8/fast" +/usr/bin/make -f CMakeFiles/cmTC_759e8.dir/build.make CMakeFiles/cmTC_759e8.dir/build +make[1]: Entering directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' +Building C object CMakeFiles/cmTC_759e8.dir/feature_tests.c.o +/usr/bin/cc -std=c11 -o CMakeFiles/cmTC_759e8.dir/feature_tests.c.o -c /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/feature_tests.c +Linking C executable cmTC_759e8 +/usr/bin/cmake -E cmake_link_script CMakeFiles/cmTC_759e8.dir/link.txt --verbose=1 +/usr/bin/cc CMakeFiles/cmTC_759e8.dir/feature_tests.c.o -o cmTC_759e8 +make[1]: Leaving directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' + + + Feature record: C_FEATURE:1c_function_prototypes + Feature record: C_FEATURE:1c_restrict + Feature record: C_FEATURE:1c_static_assert + Feature record: C_FEATURE:1c_variadic_macros + + +Detecting C [-std=c99] compiler features compiled with the following output: +Change Dir: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp + +Run Build Command:"/usr/bin/make" "cmTC_10bc8/fast" 
+/usr/bin/make -f CMakeFiles/cmTC_10bc8.dir/build.make CMakeFiles/cmTC_10bc8.dir/build +make[1]: Entering directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' +Building C object CMakeFiles/cmTC_10bc8.dir/feature_tests.c.o +/usr/bin/cc -std=c99 -o CMakeFiles/cmTC_10bc8.dir/feature_tests.c.o -c /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/feature_tests.c +Linking C executable cmTC_10bc8 +/usr/bin/cmake -E cmake_link_script CMakeFiles/cmTC_10bc8.dir/link.txt --verbose=1 +/usr/bin/cc CMakeFiles/cmTC_10bc8.dir/feature_tests.c.o -o cmTC_10bc8 +make[1]: Leaving directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' + + + Feature record: C_FEATURE:1c_function_prototypes + Feature record: C_FEATURE:1c_restrict + Feature record: C_FEATURE:0c_static_assert + Feature record: C_FEATURE:1c_variadic_macros + + +Detecting C [-std=c90] compiler features compiled with the following output: +Change Dir: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp + +Run Build Command:"/usr/bin/make" "cmTC_cbe30/fast" +/usr/bin/make -f CMakeFiles/cmTC_cbe30.dir/build.make CMakeFiles/cmTC_cbe30.dir/build +make[1]: Entering directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' +Building C object CMakeFiles/cmTC_cbe30.dir/feature_tests.c.o +/usr/bin/cc -std=c90 -o CMakeFiles/cmTC_cbe30.dir/feature_tests.c.o -c 
/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/feature_tests.c +Linking C executable cmTC_cbe30 +/usr/bin/cmake -E cmake_link_script CMakeFiles/cmTC_cbe30.dir/link.txt --verbose=1 +/usr/bin/cc CMakeFiles/cmTC_cbe30.dir/feature_tests.c.o -o cmTC_cbe30 +make[1]: Leaving directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' + + + Feature record: C_FEATURE:1c_function_prototypes + Feature record: C_FEATURE:0c_restrict + Feature record: C_FEATURE:0c_static_assert + Feature record: C_FEATURE:0c_variadic_macros +Determining if the CXX compiler works passed with the following output: +Change Dir: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp + +Run Build Command:"/usr/bin/make" "cmTC_44a41/fast" +/usr/bin/make -f CMakeFiles/cmTC_44a41.dir/build.make CMakeFiles/cmTC_44a41.dir/build +make[1]: Entering directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' +Building CXX object CMakeFiles/cmTC_44a41.dir/testCXXCompiler.cxx.o +/usr/bin/aarch64-linux-gnu-g++ -o CMakeFiles/cmTC_44a41.dir/testCXXCompiler.cxx.o -c /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp/testCXXCompiler.cxx +Linking CXX executable cmTC_44a41 +/usr/bin/cmake -E cmake_link_script CMakeFiles/cmTC_44a41.dir/link.txt --verbose=1 +/usr/bin/aarch64-linux-gnu-g++ CMakeFiles/cmTC_44a41.dir/testCXXCompiler.cxx.o -o cmTC_44a41 +make[1]: Leaving directory 
'/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' + + +Detecting CXX compiler ABI info compiled with the following output: +Change Dir: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp + +Run Build Command:"/usr/bin/make" "cmTC_ab0dd/fast" +/usr/bin/make -f CMakeFiles/cmTC_ab0dd.dir/build.make CMakeFiles/cmTC_ab0dd.dir/build +make[1]: Entering directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' +Building CXX object CMakeFiles/cmTC_ab0dd.dir/CMakeCXXCompilerABI.cpp.o +/usr/bin/aarch64-linux-gnu-g++ -o CMakeFiles/cmTC_ab0dd.dir/CMakeCXXCompilerABI.cpp.o -c /usr/share/cmake-3.10/Modules/CMakeCXXCompilerABI.cpp +Linking CXX executable cmTC_ab0dd +/usr/bin/cmake -E cmake_link_script CMakeFiles/cmTC_ab0dd.dir/link.txt --verbose=1 +/usr/bin/aarch64-linux-gnu-g++ -v CMakeFiles/cmTC_ab0dd.dir/CMakeCXXCompilerABI.cpp.o -o cmTC_ab0dd +Using built-in specs. 
+COLLECT_GCC=/usr/bin/aarch64-linux-gnu-g++ +COLLECT_LTO_WRAPPER=/usr/lib/gcc-cross/aarch64-linux-gnu/7/lto-wrapper +Target: aarch64-linux-gnu +Configured with: ../src/configure -v --with-pkgversion='Ubuntu/Linaro 7.5.0-3ubuntu1~18.04' --with-bugurl=file:///usr/share/doc/gcc-7/README.Bugs --enable-languages=c,ada,c++,go,d,fortran,objc,obj-c++ --prefix=/usr --with-gcc-major-version-only --program-suffix=-7 --enable-shared --enable-linker-build-id --libexecdir=/usr/lib --without-included-gettext --enable-threads=posix --libdir=/usr/lib --enable-nls --with-sysroot=/ --enable-clocale=gnu --enable-libstdcxx-debug --enable-libstdcxx-time=yes --with-default-libstdcxx-abi=new --enable-gnu-unique-object --disable-libquadmath --disable-libquadmath-support --enable-plugin --enable-default-pie --with-system-zlib --enable-multiarch --enable-fix-cortex-a53-843419 --disable-werror --enable-checking=release --build=x86_64-linux-gnu --host=x86_64-linux-gnu --target=aarch64-linux-gnu --program-prefix=aarch64-linux-gnu- --includedir=/usr/aarch64-linux-gnu/include +Thread model: posix +gcc version 7.5.0 (Ubuntu/Linaro 7.5.0-3ubuntu1~18.04) +COMPILER_PATH=/usr/lib/gcc-cross/aarch64-linux-gnu/7/:/usr/lib/gcc-cross/aarch64-linux-gnu/7/:/usr/lib/gcc-cross/aarch64-linux-gnu/:/usr/lib/gcc-cross/aarch64-linux-gnu/7/:/usr/lib/gcc-cross/aarch64-linux-gnu/:/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/bin/ +LIBRARY_PATH=/usr/lib/gcc-cross/aarch64-linux-gnu/7/:/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib/../lib/:/lib/aarch64-linux-gnu/:/lib/../lib/:/usr/lib/aarch64-linux-gnu/:/usr/lib/../lib/:/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib/:/lib/:/usr/lib/ +COLLECT_GCC_OPTIONS='-v' '-o' 'cmTC_ab0dd' '-shared-libgcc' '-mlittle-endian' '-mabi=lp64' + /usr/lib/gcc-cross/aarch64-linux-gnu/7/collect2 -plugin /usr/lib/gcc-cross/aarch64-linux-gnu/7/liblto_plugin.so -plugin-opt=/usr/lib/gcc-cross/aarch64-linux-gnu/7/lto-wrapper 
-plugin-opt=-fresolution=/tmp/ccmvvlHG.res -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lgcc -plugin-opt=-pass-through=-lc -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lgcc --sysroot=/ --build-id --eh-frame-hdr --hash-style=gnu --as-needed -dynamic-linker /lib/ld-linux-aarch64.so.1 -X -EL -maarch64linux --fix-cortex-a53-843419 -pie -z now -z relro -o cmTC_ab0dd /usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib/../lib/Scrt1.o /usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib/../lib/crti.o /usr/lib/gcc-cross/aarch64-linux-gnu/7/crtbeginS.o -L/usr/lib/gcc-cross/aarch64-linux-gnu/7 -L/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib/../lib -L/lib/aarch64-linux-gnu -L/lib/../lib -L/usr/lib/aarch64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib CMakeFiles/cmTC_ab0dd.dir/CMakeCXXCompilerABI.cpp.o -lstdc++ -lm -lgcc_s -lgcc -lc -lgcc_s -lgcc /usr/lib/gcc-cross/aarch64-linux-gnu/7/crtendS.o /usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib/../lib/crtn.o +COLLECT_GCC_OPTIONS='-v' '-o' 'cmTC_ab0dd' '-shared-libgcc' '-mlittle-endian' '-mabi=lp64' +make[1]: Leaving directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' + + +Parsed CXX implicit link information from above output: + link line regex: [^( *|.*[/\])(ld|CMAKE_LINK_STARTFILE-NOTFOUND|([^/\]+-)?ld|collect2)[^/\]*( |$)] + ignore line: [Change Dir: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp] + ignore line: [] + ignore line: [Run Build Command:"/usr/bin/make" "cmTC_ab0dd/fast"] + ignore line: [/usr/bin/make -f CMakeFiles/cmTC_ab0dd.dir/build.make CMakeFiles/cmTC_ab0dd.dir/build] + ignore line: [make[1]: 
Entering directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp'] + ignore line: [Building CXX object CMakeFiles/cmTC_ab0dd.dir/CMakeCXXCompilerABI.cpp.o] + ignore line: [/usr/bin/aarch64-linux-gnu-g++ -o CMakeFiles/cmTC_ab0dd.dir/CMakeCXXCompilerABI.cpp.o -c /usr/share/cmake-3.10/Modules/CMakeCXXCompilerABI.cpp] + ignore line: [Linking CXX executable cmTC_ab0dd] + ignore line: [/usr/bin/cmake -E cmake_link_script CMakeFiles/cmTC_ab0dd.dir/link.txt --verbose=1] + ignore line: [/usr/bin/aarch64-linux-gnu-g++ -v CMakeFiles/cmTC_ab0dd.dir/CMakeCXXCompilerABI.cpp.o -o cmTC_ab0dd ] + ignore line: [Using built-in specs.] + ignore line: [COLLECT_GCC=/usr/bin/aarch64-linux-gnu-g++] + ignore line: [COLLECT_LTO_WRAPPER=/usr/lib/gcc-cross/aarch64-linux-gnu/7/lto-wrapper] + ignore line: [Target: aarch64-linux-gnu] + ignore line: [Configured with: ../src/configure -v --with-pkgversion='Ubuntu/Linaro 7.5.0-3ubuntu1~18.04' --with-bugurl=file:///usr/share/doc/gcc-7/README.Bugs --enable-languages=c,ada,c++,go,d,fortran,objc,obj-c++ --prefix=/usr --with-gcc-major-version-only --program-suffix=-7 --enable-shared --enable-linker-build-id --libexecdir=/usr/lib --without-included-gettext --enable-threads=posix --libdir=/usr/lib --enable-nls --with-sysroot=/ --enable-clocale=gnu --enable-libstdcxx-debug --enable-libstdcxx-time=yes --with-default-libstdcxx-abi=new --enable-gnu-unique-object --disable-libquadmath --disable-libquadmath-support --enable-plugin --enable-default-pie --with-system-zlib --enable-multiarch --enable-fix-cortex-a53-843419 --disable-werror --enable-checking=release --build=x86_64-linux-gnu --host=x86_64-linux-gnu --target=aarch64-linux-gnu --program-prefix=aarch64-linux-gnu- --includedir=/usr/aarch64-linux-gnu/include] + ignore line: [Thread model: posix] + ignore line: [gcc version 7.5.0 (Ubuntu/Linaro 7.5.0-3ubuntu1~18.04) ] + ignore line: 
[COMPILER_PATH=/usr/lib/gcc-cross/aarch64-linux-gnu/7/:/usr/lib/gcc-cross/aarch64-linux-gnu/7/:/usr/lib/gcc-cross/aarch64-linux-gnu/:/usr/lib/gcc-cross/aarch64-linux-gnu/7/:/usr/lib/gcc-cross/aarch64-linux-gnu/:/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/bin/] + ignore line: [LIBRARY_PATH=/usr/lib/gcc-cross/aarch64-linux-gnu/7/:/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib/../lib/:/lib/aarch64-linux-gnu/:/lib/../lib/:/usr/lib/aarch64-linux-gnu/:/usr/lib/../lib/:/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib/:/lib/:/usr/lib/] + ignore line: [COLLECT_GCC_OPTIONS='-v' '-o' 'cmTC_ab0dd' '-shared-libgcc' '-mlittle-endian' '-mabi=lp64'] + link line: [ /usr/lib/gcc-cross/aarch64-linux-gnu/7/collect2 -plugin /usr/lib/gcc-cross/aarch64-linux-gnu/7/liblto_plugin.so -plugin-opt=/usr/lib/gcc-cross/aarch64-linux-gnu/7/lto-wrapper -plugin-opt=-fresolution=/tmp/ccmvvlHG.res -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lgcc -plugin-opt=-pass-through=-lc -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lgcc --sysroot=/ --build-id --eh-frame-hdr --hash-style=gnu --as-needed -dynamic-linker /lib/ld-linux-aarch64.so.1 -X -EL -maarch64linux --fix-cortex-a53-843419 -pie -z now -z relro -o cmTC_ab0dd /usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib/../lib/Scrt1.o /usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib/../lib/crti.o /usr/lib/gcc-cross/aarch64-linux-gnu/7/crtbeginS.o -L/usr/lib/gcc-cross/aarch64-linux-gnu/7 -L/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib/../lib -L/lib/aarch64-linux-gnu -L/lib/../lib -L/usr/lib/aarch64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib CMakeFiles/cmTC_ab0dd.dir/CMakeCXXCompilerABI.cpp.o -lstdc++ -lm -lgcc_s -lgcc -lc -lgcc_s -lgcc /usr/lib/gcc-cross/aarch64-linux-gnu/7/crtendS.o 
/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib/../lib/crtn.o] + arg [/usr/lib/gcc-cross/aarch64-linux-gnu/7/collect2] ==> ignore + arg [-plugin] ==> ignore + arg [/usr/lib/gcc-cross/aarch64-linux-gnu/7/liblto_plugin.so] ==> ignore + arg [-plugin-opt=/usr/lib/gcc-cross/aarch64-linux-gnu/7/lto-wrapper] ==> ignore + arg [-plugin-opt=-fresolution=/tmp/ccmvvlHG.res] ==> ignore + arg [-plugin-opt=-pass-through=-lgcc_s] ==> ignore + arg [-plugin-opt=-pass-through=-lgcc] ==> ignore + arg [-plugin-opt=-pass-through=-lc] ==> ignore + arg [-plugin-opt=-pass-through=-lgcc_s] ==> ignore + arg [-plugin-opt=-pass-through=-lgcc] ==> ignore + arg [--sysroot=/] ==> ignore + arg [--build-id] ==> ignore + arg [--eh-frame-hdr] ==> ignore + arg [--hash-style=gnu] ==> ignore + arg [--as-needed] ==> ignore + arg [-dynamic-linker] ==> ignore + arg [/lib/ld-linux-aarch64.so.1] ==> ignore + arg [-X] ==> ignore + arg [-EL] ==> ignore + arg [-maarch64linux] ==> ignore + arg [--fix-cortex-a53-843419] ==> ignore + arg [-pie] ==> ignore + arg [-znow] ==> ignore + arg [-zrelro] ==> ignore + arg [-o] ==> ignore + arg [cmTC_ab0dd] ==> ignore + arg [/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib/../lib/Scrt1.o] ==> ignore + arg [/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib/../lib/crti.o] ==> ignore + arg [/usr/lib/gcc-cross/aarch64-linux-gnu/7/crtbeginS.o] ==> ignore + arg [-L/usr/lib/gcc-cross/aarch64-linux-gnu/7] ==> dir [/usr/lib/gcc-cross/aarch64-linux-gnu/7] + arg [-L/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib/../lib] ==> dir [/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib/../lib] + arg [-L/lib/aarch64-linux-gnu] ==> dir [/lib/aarch64-linux-gnu] + arg [-L/lib/../lib] ==> dir [/lib/../lib] + arg [-L/usr/lib/aarch64-linux-gnu] ==> dir [/usr/lib/aarch64-linux-gnu] + arg [-L/usr/lib/../lib] ==> dir [/usr/lib/../lib] + arg 
[-L/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib] ==> dir [/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib] + arg [CMakeFiles/cmTC_ab0dd.dir/CMakeCXXCompilerABI.cpp.o] ==> ignore + arg [-lstdc++] ==> lib [stdc++] + arg [-lm] ==> lib [m] + arg [-lgcc_s] ==> lib [gcc_s] + arg [-lgcc] ==> lib [gcc] + arg [-lc] ==> lib [c] + arg [-lgcc_s] ==> lib [gcc_s] + arg [-lgcc] ==> lib [gcc] + arg [/usr/lib/gcc-cross/aarch64-linux-gnu/7/crtendS.o] ==> ignore + arg [/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib/../lib/crtn.o] ==> ignore + collapse library dir [/usr/lib/gcc-cross/aarch64-linux-gnu/7] ==> [/usr/lib/gcc-cross/aarch64-linux-gnu/7] + collapse library dir [/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib/../lib] ==> [/usr/aarch64-linux-gnu/lib] + collapse library dir [/lib/aarch64-linux-gnu] ==> [/lib/aarch64-linux-gnu] + collapse library dir [/lib/../lib] ==> [/lib] + collapse library dir [/usr/lib/aarch64-linux-gnu] ==> [/usr/lib/aarch64-linux-gnu] + collapse library dir [/usr/lib/../lib] ==> [/usr/lib] + collapse library dir [/usr/lib/gcc-cross/aarch64-linux-gnu/7/../../../../aarch64-linux-gnu/lib] ==> [/usr/aarch64-linux-gnu/lib] + implicit libs: [stdc++;m;gcc_s;gcc;c;gcc_s;gcc] + implicit dirs: [/usr/lib/gcc-cross/aarch64-linux-gnu/7;/usr/aarch64-linux-gnu/lib;/lib/aarch64-linux-gnu;/lib;/usr/lib/aarch64-linux-gnu;/usr/lib] + implicit fwks: [] + + + + +Detecting CXX [-std=c++1z] compiler features compiled with the following output: +Change Dir: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp + +Run Build Command:"/usr/bin/make" "cmTC_240b2/fast" +/usr/bin/make -f CMakeFiles/cmTC_240b2.dir/build.make CMakeFiles/cmTC_240b2.dir/build +make[1]: Entering directory 
'/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' +Building CXX object CMakeFiles/cmTC_240b2.dir/feature_tests.cxx.o +/usr/bin/aarch64-linux-gnu-g++ -std=c++1z -o CMakeFiles/cmTC_240b2.dir/feature_tests.cxx.o -c /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/feature_tests.cxx +Linking CXX executable cmTC_240b2 +/usr/bin/cmake -E cmake_link_script CMakeFiles/cmTC_240b2.dir/link.txt --verbose=1 +/usr/bin/aarch64-linux-gnu-g++ CMakeFiles/cmTC_240b2.dir/feature_tests.cxx.o -o cmTC_240b2 +make[1]: Leaving directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' + + + Feature record: CXX_FEATURE:1cxx_aggregate_default_initializers + Feature record: CXX_FEATURE:1cxx_alias_templates + Feature record: CXX_FEATURE:1cxx_alignas + Feature record: CXX_FEATURE:1cxx_alignof + Feature record: CXX_FEATURE:1cxx_attributes + Feature record: CXX_FEATURE:1cxx_attribute_deprecated + Feature record: CXX_FEATURE:1cxx_auto_type + Feature record: CXX_FEATURE:1cxx_binary_literals + Feature record: CXX_FEATURE:1cxx_constexpr + Feature record: CXX_FEATURE:1cxx_contextual_conversions + Feature record: CXX_FEATURE:1cxx_decltype + Feature record: CXX_FEATURE:1cxx_decltype_auto + Feature record: CXX_FEATURE:1cxx_decltype_incomplete_return_types + Feature record: CXX_FEATURE:1cxx_default_function_template_args + Feature record: CXX_FEATURE:1cxx_defaulted_functions + Feature record: CXX_FEATURE:1cxx_defaulted_move_initializers + Feature record: CXX_FEATURE:1cxx_delegating_constructors + Feature record: CXX_FEATURE:1cxx_deleted_functions + Feature record: CXX_FEATURE:1cxx_digit_separators + Feature record: CXX_FEATURE:1cxx_enum_forward_declarations + Feature 
record: CXX_FEATURE:1cxx_explicit_conversions + Feature record: CXX_FEATURE:1cxx_extended_friend_declarations + Feature record: CXX_FEATURE:1cxx_extern_templates + Feature record: CXX_FEATURE:1cxx_final + Feature record: CXX_FEATURE:1cxx_func_identifier + Feature record: CXX_FEATURE:1cxx_generalized_initializers + Feature record: CXX_FEATURE:1cxx_generic_lambdas + Feature record: CXX_FEATURE:1cxx_inheriting_constructors + Feature record: CXX_FEATURE:1cxx_inline_namespaces + Feature record: CXX_FEATURE:1cxx_lambdas + Feature record: CXX_FEATURE:1cxx_lambda_init_captures + Feature record: CXX_FEATURE:1cxx_local_type_template_args + Feature record: CXX_FEATURE:1cxx_long_long_type + Feature record: CXX_FEATURE:1cxx_noexcept + Feature record: CXX_FEATURE:1cxx_nonstatic_member_init + Feature record: CXX_FEATURE:1cxx_nullptr + Feature record: CXX_FEATURE:1cxx_override + Feature record: CXX_FEATURE:1cxx_range_for + Feature record: CXX_FEATURE:1cxx_raw_string_literals + Feature record: CXX_FEATURE:1cxx_reference_qualified_functions + Feature record: CXX_FEATURE:1cxx_relaxed_constexpr + Feature record: CXX_FEATURE:1cxx_return_type_deduction + Feature record: CXX_FEATURE:1cxx_right_angle_brackets + Feature record: CXX_FEATURE:1cxx_rvalue_references + Feature record: CXX_FEATURE:1cxx_sizeof_member + Feature record: CXX_FEATURE:1cxx_static_assert + Feature record: CXX_FEATURE:1cxx_strong_enums + Feature record: CXX_FEATURE:1cxx_template_template_parameters + Feature record: CXX_FEATURE:1cxx_thread_local + Feature record: CXX_FEATURE:1cxx_trailing_return_types + Feature record: CXX_FEATURE:1cxx_unicode_literals + Feature record: CXX_FEATURE:1cxx_uniform_initialization + Feature record: CXX_FEATURE:1cxx_unrestricted_unions + Feature record: CXX_FEATURE:1cxx_user_literals + Feature record: CXX_FEATURE:1cxx_variable_templates + Feature record: CXX_FEATURE:1cxx_variadic_macros + Feature record: CXX_FEATURE:1cxx_variadic_templates + + +Detecting CXX [-std=c++14] compiler features 
compiled with the following output: +Change Dir: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp + +Run Build Command:"/usr/bin/make" "cmTC_59d7e/fast" +/usr/bin/make -f CMakeFiles/cmTC_59d7e.dir/build.make CMakeFiles/cmTC_59d7e.dir/build +make[1]: Entering directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' +Building CXX object CMakeFiles/cmTC_59d7e.dir/feature_tests.cxx.o +/usr/bin/aarch64-linux-gnu-g++ -std=c++14 -o CMakeFiles/cmTC_59d7e.dir/feature_tests.cxx.o -c /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/feature_tests.cxx +Linking CXX executable cmTC_59d7e +/usr/bin/cmake -E cmake_link_script CMakeFiles/cmTC_59d7e.dir/link.txt --verbose=1 +/usr/bin/aarch64-linux-gnu-g++ CMakeFiles/cmTC_59d7e.dir/feature_tests.cxx.o -o cmTC_59d7e +make[1]: Leaving directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' + + + Feature record: CXX_FEATURE:1cxx_aggregate_default_initializers + Feature record: CXX_FEATURE:1cxx_alias_templates + Feature record: CXX_FEATURE:1cxx_alignas + Feature record: CXX_FEATURE:1cxx_alignof + Feature record: CXX_FEATURE:1cxx_attributes + Feature record: CXX_FEATURE:1cxx_attribute_deprecated + Feature record: CXX_FEATURE:1cxx_auto_type + Feature record: CXX_FEATURE:1cxx_binary_literals + Feature record: CXX_FEATURE:1cxx_constexpr + Feature record: CXX_FEATURE:1cxx_contextual_conversions + Feature record: CXX_FEATURE:1cxx_decltype + Feature record: CXX_FEATURE:1cxx_decltype_auto + Feature record: CXX_FEATURE:1cxx_decltype_incomplete_return_types + Feature record: 
CXX_FEATURE:1cxx_default_function_template_args + Feature record: CXX_FEATURE:1cxx_defaulted_functions + Feature record: CXX_FEATURE:1cxx_defaulted_move_initializers + Feature record: CXX_FEATURE:1cxx_delegating_constructors + Feature record: CXX_FEATURE:1cxx_deleted_functions + Feature record: CXX_FEATURE:1cxx_digit_separators + Feature record: CXX_FEATURE:1cxx_enum_forward_declarations + Feature record: CXX_FEATURE:1cxx_explicit_conversions + Feature record: CXX_FEATURE:1cxx_extended_friend_declarations + Feature record: CXX_FEATURE:1cxx_extern_templates + Feature record: CXX_FEATURE:1cxx_final + Feature record: CXX_FEATURE:1cxx_func_identifier + Feature record: CXX_FEATURE:1cxx_generalized_initializers + Feature record: CXX_FEATURE:1cxx_generic_lambdas + Feature record: CXX_FEATURE:1cxx_inheriting_constructors + Feature record: CXX_FEATURE:1cxx_inline_namespaces + Feature record: CXX_FEATURE:1cxx_lambdas + Feature record: CXX_FEATURE:1cxx_lambda_init_captures + Feature record: CXX_FEATURE:1cxx_local_type_template_args + Feature record: CXX_FEATURE:1cxx_long_long_type + Feature record: CXX_FEATURE:1cxx_noexcept + Feature record: CXX_FEATURE:1cxx_nonstatic_member_init + Feature record: CXX_FEATURE:1cxx_nullptr + Feature record: CXX_FEATURE:1cxx_override + Feature record: CXX_FEATURE:1cxx_range_for + Feature record: CXX_FEATURE:1cxx_raw_string_literals + Feature record: CXX_FEATURE:1cxx_reference_qualified_functions + Feature record: CXX_FEATURE:1cxx_relaxed_constexpr + Feature record: CXX_FEATURE:1cxx_return_type_deduction + Feature record: CXX_FEATURE:1cxx_right_angle_brackets + Feature record: CXX_FEATURE:1cxx_rvalue_references + Feature record: CXX_FEATURE:1cxx_sizeof_member + Feature record: CXX_FEATURE:1cxx_static_assert + Feature record: CXX_FEATURE:1cxx_strong_enums + Feature record: CXX_FEATURE:1cxx_template_template_parameters + Feature record: CXX_FEATURE:1cxx_thread_local + Feature record: CXX_FEATURE:1cxx_trailing_return_types + Feature record: 
CXX_FEATURE:1cxx_unicode_literals + Feature record: CXX_FEATURE:1cxx_uniform_initialization + Feature record: CXX_FEATURE:1cxx_unrestricted_unions + Feature record: CXX_FEATURE:1cxx_user_literals + Feature record: CXX_FEATURE:1cxx_variable_templates + Feature record: CXX_FEATURE:1cxx_variadic_macros + Feature record: CXX_FEATURE:1cxx_variadic_templates + + +Detecting CXX [-std=c++11] compiler features compiled with the following output: +Change Dir: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp + +Run Build Command:"/usr/bin/make" "cmTC_cfd38/fast" +/usr/bin/make -f CMakeFiles/cmTC_cfd38.dir/build.make CMakeFiles/cmTC_cfd38.dir/build +make[1]: Entering directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' +Building CXX object CMakeFiles/cmTC_cfd38.dir/feature_tests.cxx.o +/usr/bin/aarch64-linux-gnu-g++ -std=c++11 -o CMakeFiles/cmTC_cfd38.dir/feature_tests.cxx.o -c /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/feature_tests.cxx +Linking CXX executable cmTC_cfd38 +/usr/bin/cmake -E cmake_link_script CMakeFiles/cmTC_cfd38.dir/link.txt --verbose=1 +/usr/bin/aarch64-linux-gnu-g++ CMakeFiles/cmTC_cfd38.dir/feature_tests.cxx.o -o cmTC_cfd38 +make[1]: Leaving directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' + + + Feature record: CXX_FEATURE:0cxx_aggregate_default_initializers + Feature record: CXX_FEATURE:1cxx_alias_templates + Feature record: CXX_FEATURE:1cxx_alignas + Feature record: CXX_FEATURE:1cxx_alignof + Feature record: CXX_FEATURE:1cxx_attributes + Feature record: 
CXX_FEATURE:0cxx_attribute_deprecated + Feature record: CXX_FEATURE:1cxx_auto_type + Feature record: CXX_FEATURE:0cxx_binary_literals + Feature record: CXX_FEATURE:1cxx_constexpr + Feature record: CXX_FEATURE:0cxx_contextual_conversions + Feature record: CXX_FEATURE:1cxx_decltype + Feature record: CXX_FEATURE:0cxx_decltype_auto + Feature record: CXX_FEATURE:1cxx_decltype_incomplete_return_types + Feature record: CXX_FEATURE:1cxx_default_function_template_args + Feature record: CXX_FEATURE:1cxx_defaulted_functions + Feature record: CXX_FEATURE:1cxx_defaulted_move_initializers + Feature record: CXX_FEATURE:1cxx_delegating_constructors + Feature record: CXX_FEATURE:1cxx_deleted_functions + Feature record: CXX_FEATURE:0cxx_digit_separators + Feature record: CXX_FEATURE:1cxx_enum_forward_declarations + Feature record: CXX_FEATURE:1cxx_explicit_conversions + Feature record: CXX_FEATURE:1cxx_extended_friend_declarations + Feature record: CXX_FEATURE:1cxx_extern_templates + Feature record: CXX_FEATURE:1cxx_final + Feature record: CXX_FEATURE:1cxx_func_identifier + Feature record: CXX_FEATURE:1cxx_generalized_initializers + Feature record: CXX_FEATURE:0cxx_generic_lambdas + Feature record: CXX_FEATURE:1cxx_inheriting_constructors + Feature record: CXX_FEATURE:1cxx_inline_namespaces + Feature record: CXX_FEATURE:1cxx_lambdas + Feature record: CXX_FEATURE:0cxx_lambda_init_captures + Feature record: CXX_FEATURE:1cxx_local_type_template_args + Feature record: CXX_FEATURE:1cxx_long_long_type + Feature record: CXX_FEATURE:1cxx_noexcept + Feature record: CXX_FEATURE:1cxx_nonstatic_member_init + Feature record: CXX_FEATURE:1cxx_nullptr + Feature record: CXX_FEATURE:1cxx_override + Feature record: CXX_FEATURE:1cxx_range_for + Feature record: CXX_FEATURE:1cxx_raw_string_literals + Feature record: CXX_FEATURE:1cxx_reference_qualified_functions + Feature record: CXX_FEATURE:0cxx_relaxed_constexpr + Feature record: CXX_FEATURE:0cxx_return_type_deduction + Feature record: 
CXX_FEATURE:1cxx_right_angle_brackets + Feature record: CXX_FEATURE:1cxx_rvalue_references + Feature record: CXX_FEATURE:1cxx_sizeof_member + Feature record: CXX_FEATURE:1cxx_static_assert + Feature record: CXX_FEATURE:1cxx_strong_enums + Feature record: CXX_FEATURE:1cxx_template_template_parameters + Feature record: CXX_FEATURE:1cxx_thread_local + Feature record: CXX_FEATURE:1cxx_trailing_return_types + Feature record: CXX_FEATURE:1cxx_unicode_literals + Feature record: CXX_FEATURE:1cxx_uniform_initialization + Feature record: CXX_FEATURE:1cxx_unrestricted_unions + Feature record: CXX_FEATURE:1cxx_user_literals + Feature record: CXX_FEATURE:0cxx_variable_templates + Feature record: CXX_FEATURE:1cxx_variadic_macros + Feature record: CXX_FEATURE:1cxx_variadic_templates + + +Detecting CXX [-std=c++98] compiler features compiled with the following output: +Change Dir: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp + +Run Build Command:"/usr/bin/make" "cmTC_6f057/fast" +/usr/bin/make -f CMakeFiles/cmTC_6f057.dir/build.make CMakeFiles/cmTC_6f057.dir/build +make[1]: Entering directory '/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' +Building CXX object CMakeFiles/cmTC_6f057.dir/feature_tests.cxx.o +/usr/bin/aarch64-linux-gnu-g++ -std=c++98 -o CMakeFiles/cmTC_6f057.dir/feature_tests.cxx.o -c /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/feature_tests.cxx +Linking CXX executable cmTC_6f057 +/usr/bin/cmake -E cmake_link_script CMakeFiles/cmTC_6f057.dir/link.txt --verbose=1 +/usr/bin/aarch64-linux-gnu-g++ CMakeFiles/cmTC_6f057.dir/feature_tests.cxx.o -o cmTC_6f057 +make[1]: Leaving directory 
'/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/CMakeTmp' + + + Feature record: CXX_FEATURE:0cxx_aggregate_default_initializers + Feature record: CXX_FEATURE:0cxx_alias_templates + Feature record: CXX_FEATURE:0cxx_alignas + Feature record: CXX_FEATURE:0cxx_alignof + Feature record: CXX_FEATURE:0cxx_attributes + Feature record: CXX_FEATURE:0cxx_attribute_deprecated + Feature record: CXX_FEATURE:0cxx_auto_type + Feature record: CXX_FEATURE:0cxx_binary_literals + Feature record: CXX_FEATURE:0cxx_constexpr + Feature record: CXX_FEATURE:0cxx_contextual_conversions + Feature record: CXX_FEATURE:0cxx_decltype + Feature record: CXX_FEATURE:0cxx_decltype_auto + Feature record: CXX_FEATURE:0cxx_decltype_incomplete_return_types + Feature record: CXX_FEATURE:0cxx_default_function_template_args + Feature record: CXX_FEATURE:0cxx_defaulted_functions + Feature record: CXX_FEATURE:0cxx_defaulted_move_initializers + Feature record: CXX_FEATURE:0cxx_delegating_constructors + Feature record: CXX_FEATURE:0cxx_deleted_functions + Feature record: CXX_FEATURE:0cxx_digit_separators + Feature record: CXX_FEATURE:0cxx_enum_forward_declarations + Feature record: CXX_FEATURE:0cxx_explicit_conversions + Feature record: CXX_FEATURE:0cxx_extended_friend_declarations + Feature record: CXX_FEATURE:0cxx_extern_templates + Feature record: CXX_FEATURE:0cxx_final + Feature record: CXX_FEATURE:0cxx_func_identifier + Feature record: CXX_FEATURE:0cxx_generalized_initializers + Feature record: CXX_FEATURE:0cxx_generic_lambdas + Feature record: CXX_FEATURE:0cxx_inheriting_constructors + Feature record: CXX_FEATURE:0cxx_inline_namespaces + Feature record: CXX_FEATURE:0cxx_lambdas + Feature record: CXX_FEATURE:0cxx_lambda_init_captures + Feature record: CXX_FEATURE:0cxx_local_type_template_args + Feature record: CXX_FEATURE:0cxx_long_long_type + Feature record: CXX_FEATURE:0cxx_noexcept + Feature 
record: CXX_FEATURE:0cxx_nonstatic_member_init + Feature record: CXX_FEATURE:0cxx_nullptr + Feature record: CXX_FEATURE:0cxx_override + Feature record: CXX_FEATURE:0cxx_range_for + Feature record: CXX_FEATURE:0cxx_raw_string_literals + Feature record: CXX_FEATURE:0cxx_reference_qualified_functions + Feature record: CXX_FEATURE:0cxx_relaxed_constexpr + Feature record: CXX_FEATURE:0cxx_return_type_deduction + Feature record: CXX_FEATURE:0cxx_right_angle_brackets + Feature record: CXX_FEATURE:0cxx_rvalue_references + Feature record: CXX_FEATURE:0cxx_sizeof_member + Feature record: CXX_FEATURE:0cxx_static_assert + Feature record: CXX_FEATURE:0cxx_strong_enums + Feature record: CXX_FEATURE:1cxx_template_template_parameters + Feature record: CXX_FEATURE:0cxx_thread_local + Feature record: CXX_FEATURE:0cxx_trailing_return_types + Feature record: CXX_FEATURE:0cxx_unicode_literals + Feature record: CXX_FEATURE:0cxx_uniform_initialization + Feature record: CXX_FEATURE:0cxx_unrestricted_unions + Feature record: CXX_FEATURE:0cxx_user_literals + Feature record: CXX_FEATURE:0cxx_variable_templates + Feature record: CXX_FEATURE:0cxx_variadic_macros + Feature record: CXX_FEATURE:0cxx_variadic_templates diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/Makefile.cmake b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/Makefile.cmake new file mode 100644 index 0000000000000000000000000000000000000000..9d96e2df487cb632afc6bd4b8f5914a605713969 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/Makefile.cmake @@ -0,0 +1,46 @@ +# CMAKE generated file: DO NOT EDIT! 
+# Generated by "Unix Makefiles" Generator, CMake Version 3.10 + +# The generator used is: +set(CMAKE_DEPENDS_GENERATOR "Unix Makefiles") + +# The top level Makefile was generated from the following files: +set(CMAKE_MAKEFILE_DEPENDS + "CMakeCache.txt" + "CMakeFiles/3.10.2/CMakeCCompiler.cmake" + "CMakeFiles/3.10.2/CMakeCXXCompiler.cmake" + "CMakeFiles/3.10.2/CMakeSystem.cmake" + "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/CMakeLists.txt" + "/usr/share/cmake-3.10/Modules/CMakeCInformation.cmake" + "/usr/share/cmake-3.10/Modules/CMakeCXXInformation.cmake" + "/usr/share/cmake-3.10/Modules/CMakeCommonLanguageInclude.cmake" + "/usr/share/cmake-3.10/Modules/CMakeGenericSystem.cmake" + "/usr/share/cmake-3.10/Modules/CMakeLanguageInformation.cmake" + "/usr/share/cmake-3.10/Modules/CMakeSystemSpecificInformation.cmake" + "/usr/share/cmake-3.10/Modules/CMakeSystemSpecificInitialize.cmake" + "/usr/share/cmake-3.10/Modules/Compiler/CMakeCommonCompilerMacros.cmake" + "/usr/share/cmake-3.10/Modules/Compiler/GNU-C.cmake" + "/usr/share/cmake-3.10/Modules/Compiler/GNU-CXX.cmake" + "/usr/share/cmake-3.10/Modules/Compiler/GNU.cmake" + "/usr/share/cmake-3.10/Modules/Platform/Linux-GNU-C.cmake" + "/usr/share/cmake-3.10/Modules/Platform/Linux-GNU-CXX.cmake" + "/usr/share/cmake-3.10/Modules/Platform/Linux-GNU.cmake" + "/usr/share/cmake-3.10/Modules/Platform/Linux.cmake" + "/usr/share/cmake-3.10/Modules/Platform/UnixPaths.cmake" + ) + +# The corresponding makefile is: +set(CMAKE_MAKEFILE_OUTPUTS + "Makefile" + "CMakeFiles/cmake.check_cache" + ) + +# Byproducts of CMake generate step: +set(CMAKE_MAKEFILE_PRODUCTS + "CMakeFiles/CMakeDirectoryInformation.cmake" + ) + +# Dependency information for all targets: +set(CMAKE_DEPEND_INFO_FILES + "CMakeFiles/main.dir/DependInfo.cmake" + ) diff --git 
a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/Makefile2 b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/Makefile2 new file mode 100644 index 0000000000000000000000000000000000000000..1ae8b027da92fdf110cf7447bf58c491514168d8 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/Makefile2 @@ -0,0 +1,108 @@ +# CMAKE generated file: DO NOT EDIT! +# Generated by "Unix Makefiles" Generator, CMake Version 3.10 + +# Default target executed when no arguments are given to make. +default_target: all + +.PHONY : default_target + +# The main recursive all target +all: + +.PHONY : all + +# The main recursive preinstall target +preinstall: + +.PHONY : preinstall + +#============================================================================= +# Special targets provided by cmake. + +# Disable implicit rules so canonical targets will work. +.SUFFIXES: + + +# Remove some rules from gmake that .SUFFIXES does not remove. +SUFFIXES = + +.SUFFIXES: .hpux_make_needs_suffix_list + + +# Suppress display of executed commands. +$(VERBOSE).SILENT: + + +# A target that is always out of date. +cmake_force: + +.PHONY : cmake_force + +#============================================================================= +# Set environment variables for the build. + +# The shell in which to execute make rules. +SHELL = /bin/sh + +# The CMake executable. +CMAKE_COMMAND = /usr/bin/cmake + +# The command to remove a file. +RM = /usr/bin/cmake -E remove -f + +# Escaping for special characters. +EQUALS = = + +# The top-level source directory on which CMake was run. 
+CMAKE_SOURCE_DIR = /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src + +# The top-level build directory on which CMake was run. +CMAKE_BINARY_DIR = /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host + +#============================================================================= +# Target rules for target CMakeFiles/main.dir + +# All Build rule for target. +CMakeFiles/main.dir/all: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/depend + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/build + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --progress-dir=/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles --progress-num=1,2,3,4,5,6,7,8,9 "Built target main" +.PHONY : CMakeFiles/main.dir/all + +# Include target in all. +all: CMakeFiles/main.dir/all + +.PHONY : all + +# Build rule for subdir invocation for target. +CMakeFiles/main.dir/rule: cmake_check_build_system + $(CMAKE_COMMAND) -E cmake_progress_start /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles 9 + $(MAKE) -f CMakeFiles/Makefile2 CMakeFiles/main.dir/all + $(CMAKE_COMMAND) -E cmake_progress_start /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles 0 +.PHONY : CMakeFiles/main.dir/rule + +# Convenience name for target. +main: CMakeFiles/main.dir/rule + +.PHONY : main + +# clean rule for target. +CMakeFiles/main.dir/clean: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/clean +.PHONY : CMakeFiles/main.dir/clean + +# clean rule for target. 
+clean: CMakeFiles/main.dir/clean + +.PHONY : clean + +#============================================================================= +# Special targets to cleanup operation of make. + +# Special rule to run CMake to check the build system integrity. +# No rule that depends on this can have commands that come from listfiles +# because they might be regenerated. +cmake_check_build_system: + $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 0 +.PHONY : cmake_check_build_system + diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/TargetDirectories.txt b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/TargetDirectories.txt new file mode 100644 index 0000000000000000000000000000000000000000..49cef8e5463bccbc28d7b70eb91825274dcce90c --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/TargetDirectories.txt @@ -0,0 +1,7 @@ +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/install/strip.dir +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/edit_cache.dir +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/rebuild_cache.dir 
+/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/list_install_components.dir +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/install/local.dir +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/install.dir diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/cmake.check_cache b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/cmake.check_cache new file mode 100644 index 0000000000000000000000000000000000000000..3dccd731726d7faa8b29d8d7dba3b981a53ca497 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/cmake.check_cache @@ -0,0 +1 @@ +# This file is generated by cmake for dependency checking of the CMakeCache.txt file diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/feature_tests.bin b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/feature_tests.bin new file mode 100644 index 0000000000000000000000000000000000000000..63acb258faea1dc09cc815d2a13f5d7b7fea1024 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/feature_tests.bin differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/feature_tests.c 
b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/feature_tests.c new file mode 100644 index 0000000000000000000000000000000000000000..83e86dd8cd85f9f7554f51122b8cd08412ec01f2 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/feature_tests.c @@ -0,0 +1,34 @@ + + const char features[] = {"\n" +"C_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 304 +"1" +#else +"0" +#endif +"c_function_prototypes\n" +"C_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 304 && defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L +"1" +#else +"0" +#endif +"c_restrict\n" +"C_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 406 && defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201000L +"1" +#else +"0" +#endif +"c_static_assert\n" +"C_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 304 && defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L +"1" +#else +"0" +#endif +"c_variadic_macros\n" + +}; + +int main(int argc, char** argv) { (void)argv; return features[argc]; } diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/feature_tests.cxx b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/feature_tests.cxx new file mode 100644 index 0000000000000000000000000000000000000000..b93418c6ed69feaf1b5c2feb9592bbdb5a5f042c --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/feature_tests.cxx @@ -0,0 +1,405 @@ + + const char features[] = {"\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 500 && __cplusplus >= 201402L +"1" +#else +"0" +#endif +"cxx_aggregate_default_initializers\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + 
__GNUC_MINOR__) >= 407 && __cplusplus >= 201103L +"1" +#else +"0" +#endif +"cxx_alias_templates\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 408 && __cplusplus >= 201103L +"1" +#else +"0" +#endif +"cxx_alignas\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 408 && __cplusplus >= 201103L +"1" +#else +"0" +#endif +"cxx_alignof\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 408 && __cplusplus >= 201103L +"1" +#else +"0" +#endif +"cxx_attributes\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 409 && __cplusplus > 201103L +"1" +#else +"0" +#endif +"cxx_attribute_deprecated\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_auto_type\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 409 && __cplusplus > 201103L +"1" +#else +"0" +#endif +"cxx_binary_literals\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 406 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_constexpr\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 409 && __cplusplus > 201103L +"1" +#else +"0" +#endif +"cxx_contextual_conversions\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_decltype\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 409 && __cplusplus > 201103L +"1" +#else +"0" +#endif +"cxx_decltype_auto\n" +"CXX_FEATURE:" +#if ((__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) >= 40801) && __cplusplus >= 201103L +"1" +#else +"0" +#endif +"cxx_decltype_incomplete_return_types\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && 
__GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_default_function_template_args\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_defaulted_functions\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 406 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_defaulted_move_initializers\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 407 && __cplusplus >= 201103L +"1" +#else +"0" +#endif +"cxx_delegating_constructors\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_deleted_functions\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 409 && __cplusplus > 201103L +"1" +#else +"0" +#endif +"cxx_digit_separators\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 406 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_enum_forward_declarations\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 405 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_explicit_conversions\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 407 && __cplusplus >= 201103L +"1" +#else +"0" +#endif +"cxx_extended_friend_declarations\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_extern_templates\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 407 && __cplusplus >= 201103L +"1" +#else +"0" +#endif +"cxx_final\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + 
__GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_func_identifier\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_generalized_initializers\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 409 && __cplusplus > 201103L +"1" +#else +"0" +#endif +"cxx_generic_lambdas\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 408 && __cplusplus >= 201103L +"1" +#else +"0" +#endif +"cxx_inheriting_constructors\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_inline_namespaces\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 405 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_lambdas\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 409 && __cplusplus > 201103L +"1" +#else +"0" +#endif +"cxx_lambda_init_captures\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 405 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_local_type_template_args\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_long_long_type\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 406 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_noexcept\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 407 && __cplusplus >= 201103L +"1" +#else +"0" +#endif 
+"cxx_nonstatic_member_init\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 406 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_nullptr\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 407 && __cplusplus >= 201103L +"1" +#else +"0" +#endif +"cxx_override\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 406 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_range_for\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 405 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_raw_string_literals\n" +"CXX_FEATURE:" +#if ((__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) >= 40801) && __cplusplus >= 201103L +"1" +#else +"0" +#endif +"cxx_reference_qualified_functions\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 500 && __cplusplus >= 201402L +"1" +#else +"0" +#endif +"cxx_relaxed_constexpr\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 409 && __cplusplus > 201103L +"1" +#else +"0" +#endif +"cxx_return_type_deduction\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_right_angle_brackets\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_rvalue_references\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_sizeof_member\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || 
(defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_static_assert\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_strong_enums\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 404 && __cplusplus +"1" +#else +"0" +#endif +"cxx_template_template_parameters\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 408 && __cplusplus >= 201103L +"1" +#else +"0" +#endif +"cxx_thread_local\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_trailing_return_types\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_unicode_literals\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_uniform_initialization\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 406 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_unrestricted_unions\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 407 && __cplusplus >= 201103L +"1" +#else +"0" +#endif +"cxx_user_literals\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 500 && __cplusplus >= 201402L +"1" +#else +"0" +#endif +"cxx_variable_templates\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_variadic_macros\n" +"CXX_FEATURE:" +#if (__GNUC__ * 100 + 
__GNUC_MINOR__) >= 404 && (__cplusplus >= 201103L || (defined(__GXX_EXPERIMENTAL_CXX0X__) && __GXX_EXPERIMENTAL_CXX0X__)) +"1" +#else +"0" +#endif +"cxx_variadic_templates\n" + +}; + +int main(int argc, char** argv) { (void)argv; return features[argc]; } diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/CXX.includecache b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/CXX.includecache new file mode 100644 index 0000000000000000000000000000000000000000..68040ae3a52b8ef1e5658d4a0e08ab2c7547bead --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/CXX.includecache @@ -0,0 +1,2460 @@ +#IncludeRegexLine: ^[ ]*[#%][ ]*(include|import)[ ]*[<"]([^">]+)([">]) + +#IncludeRegexScan: ^.*$ + +#IncludeRegexComplain: ^$ + +#IncludeRegexTransform: + +/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl.h +acl_rt.h +/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_rt.h +acl_op.h +/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_op.h +acl_mdl.h +/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_mdl.h + +/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_base.h +stdint.h +- +stddef.h +- + +/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_mdl.h +stddef.h +- +stdint.h +- +acl_base.h +/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_base.h +acl_rt.h +/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_rt.h + +/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_op.h +acl_base.h 
+/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_base.h +acl_rt.h +/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_rt.h + +/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_rt.h +stdint.h +- +stddef.h +- +acl_base.h +/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_base.h + +/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/ops/acl_dvpp.h +stdint.h +- +stddef.h +- +acl/acl.h +/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/ops/acl/acl.h +acl/acl_base.h +/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/ops/acl/acl_base.h + +/home/ascend/Ascend/driver/peripheral_api.h +stdint.h +- + +/home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/channel.h +string +- +cstdint +- +vector +- +memory +- +ascenddk/presenter/agent/errors.h +/home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/ascenddk/presenter/agent/errors.h + +/home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/errors.h + +/home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/presenter_channel.h +ascenddk/presenter/agent/channel.h +/home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/ascenddk/presenter/agent/channel.h +ascenddk/presenter/agent/errors.h +/home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/ascenddk/presenter/agent/errors.h +ascenddk/presenter/agent/presenter_types.h +/home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/ascenddk/presenter/agent/presenter_types.h + +/home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/presenter_types.h +string +- +cstdint +- +vector +- + +/home/ascend/ascend_ddk/arm/include/atlasutil/acl_device.h +unistd.h +- +string +- +acl/acl.h +/home/ascend/ascend_ddk/arm/include/atlasutil/acl/acl.h +atlas_error.h +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_error.h + +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_app.h 
+acl/acl.h +/home/ascend/ascend_ddk/arm/include/atlasutil/acl/acl.h +atlas_thread_mgr.h +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread_mgr.h + +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_error.h +unistd.h +- + +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_model.h +iostream +- +atlas_utils.h +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_utils.h +acl/acl.h +/home/ascend/ascend_ddk/arm/include/atlasutil/acl/acl.h + +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread.h +iostream +- +memory +- +thread +- +unistd.h +- +thread_safe_queue.h +/home/ascend/ascend_ddk/arm/include/atlasutil/thread_safe_queue.h +acl/acl.h +/home/ascend/ascend_ddk/arm/include/atlasutil/acl/acl.h +atlas_error.h +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_error.h + +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread_mgr.h +iostream +- +memory +- +thread +- +unistd.h +- +atlas_utils.h +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_utils.h +thread_safe_queue.h +/home/ascend/ascend_ddk/arm/include/atlasutil/thread_safe_queue.h +atlas_thread.h +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread.h + +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_type.h +unistd.h +- +string +- +acl/acl.h +/home/ascend/ascend_ddk/arm/include/atlasutil/acl/acl.h +acl/ops/acl_dvpp.h +/home/ascend/ascend_ddk/arm/include/atlasutil/acl/ops/acl_dvpp.h + +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_utils.h +iostream +- +vector +- +memory +- +mutex +- +unistd.h +- +acl/acl.h +/home/ascend/ascend_ddk/arm/include/atlasutil/acl/acl.h +acl/ops/acl_dvpp.h +/home/ascend/ascend_ddk/arm/include/atlasutil/acl/ops/acl_dvpp.h +atlas_error.h +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_error.h +atlas_type.h +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_type.h + +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_videocap_base.h +atlas_error.h +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_error.h +atlas_type.h 
+/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_type.h + +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_videocapture.h +unistd.h +- +atlas_utils.h +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_utils.h +atlas_videocap_base.h +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_videocap_base.h + +/home/ascend/ascend_ddk/arm/include/atlasutil/dvpp_process.h +cstdint +- +acl/acl.h +/home/ascend/ascend_ddk/arm/include/atlasutil/acl/acl.h +acl/ops/acl_dvpp.h +/home/ascend/ascend_ddk/arm/include/atlasutil/acl/ops/acl_dvpp.h +atlas_utils.h +/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_utils.h + +/home/ascend/ascend_ddk/arm/include/atlasutil/parse_config.h +string +- +map +- + +/home/ascend/ascend_ddk/arm/include/atlasutil/thread_safe_queue.h +mutex +- +queue +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/calib3d.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core.hpp +opencv2/features2d.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/features2d.hpp +opencv2/core/affine.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core/affine.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core.hpp +opencv2/core/cvdef.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core/cvdef.h +opencv2/core/version.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core/version.hpp +opencv2/core/base.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core/base.hpp +opencv2/core/cvstd.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core/cvstd.hpp +opencv2/core/traits.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core/traits.hpp +opencv2/core/matx.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core/matx.hpp +opencv2/core/types.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core/types.hpp +opencv2/core/mat.hpp 
+/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core/mat.hpp +opencv2/core/persistence.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core/persistence.hpp +opencv2/core/operations.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core/operations.hpp +opencv2/core/cvstd.inl.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core/cvstd.inl.hpp +opencv2/core/utility.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core/utility.hpp +opencv2/core/optim.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core/optim.hpp +opencv2/core/ovx.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core/ovx.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/affine.hpp +opencv2/core.hpp +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/async.hpp +opencv2/core/mat.hpp +- +chrono +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/base.hpp +opencv2/opencv_modules.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/opencv_modules.hpp +climits +- +algorithm +- +opencv2/core/cvdef.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/cvdef.h +opencv2/core/cvstd.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/cvstd.hpp +opencv2/core/neon_utils.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/neon_utils.hpp +opencv2/core/vsx_utils.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/vsx_utils.hpp +opencv2/core/check.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/check.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/bufferpool.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/check.hpp +opencv2/core/base.hpp +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core.hpp + 
+/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core_c.h +opencv2/core/types_c.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/types_c.h +cxcore.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cxcore.h +cxcore.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cxcore.h +opencv2/core/utility.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/utility.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core.hpp +opencv2/core/cuda_types.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/cuda_types.hpp +opencv2/opencv.hpp +- +opencv2/core/cuda.inl.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/cuda.inl.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.inl.hpp +opencv2/core/cuda.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/cuda.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda_types.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_dispatch.h +cv_cpu_config.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_config.h +cv_cpu_helper.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_helper.h +emmintrin.h +- +pmmintrin.h +- +tmmintrin.h +- +smmintrin.h +- +nmmintrin.h +- +nmmintrin.h +- +popcntintrin.h +- +immintrin.h +- +arm_neon.h +- +immintrin.h +- +immintrin.h +- +immintrin.h +- +Intrin.h +- +arm_neon.h +- +arm_neon.h +- +arm_neon.h +- +altivec.h +- +hal/msa_macros.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/msa_macros.h +wasm_simd128.h +- +emmintrin.h +- +Intrin.h +- +arm_neon.h +- +arm_neon.h +- +altivec.h +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_helper.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvdef.h +cvconfig.h 
+/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvconfig.h +limits +- +limits.h +- +opencv2/core/hal/interface.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/hal/interface.h +cv_cpu_dispatch.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_dispatch.h +intrin.h +- +array +- +cstdint +- +stdint.h +- +stdint.h +- +opencv2/core/fast_math.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/fast_math.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.hpp +opencv2/core/cvdef.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/cvdef.h +cstddef +- +cstring +- +cctype +- +string +- +algorithm +- +utility +- +cstdlib +- +cmath +- +cvstd_wrapper.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd_wrapper.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.inl.hpp +complex +- +ostream +- +sstream +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd_wrapper.hpp +opencv2/core/cvdef.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/cvdef.h +string +- +memory +- +type_traits +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/fast_math.hpp +opencv2/core/cvdef.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/cvdef.h +cmath +- +fastmath.h +- +math.h +- +emmintrin.h +- +altivec.h +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/interface.h +cstddef +- +stddef.h +- +stdbool.h +- +cstdint +- +stdint.h +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/msa_macros.h +msa.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/msa.h +stdint.h +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.hpp +opencv2/core/matx.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/matx.hpp +opencv2/core/types.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/types.hpp 
+opencv2/core/bufferpool.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/bufferpool.hpp +type_traits +- +opencv2/core/mat.inl.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/mat.inl.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.inl.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/matx.hpp +opencv2/core/cvdef.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/cvdef.h +opencv2/core/base.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/base.hpp +opencv2/core/traits.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/traits.hpp +opencv2/core/saturate.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/saturate.hpp +initializer_list +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/neon_utils.hpp +opencv2/core/cvdef.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/cvdef.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/operations.hpp +cstdio +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/optim.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/ovx.hpp +cvdef.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvdef.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/persistence.hpp +opencv2/core/types.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/types.hpp +opencv2/core/mat.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/mat.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core.hpp +time.h +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/saturate.hpp +opencv2/core/cvdef.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/cvdef.h +opencv2/core/fast_math.hpp 
+/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/fast_math.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/traits.hpp +opencv2/core/cvdef.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/cvdef.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types.hpp +climits +- +cfloat +- +vector +- +limits +- +opencv2/core/cvdef.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/cvdef.h +opencv2/core/cvstd.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/cvstd.hpp +opencv2/core/matx.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/matx.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types_c.h +ipl.h +- +ipl/ipl.h +- +opencv2/core/cvdef.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/cvdef.h +assert.h +- +stdlib.h +- +string.h +- +float.h +- +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utility.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core.hpp +ostream +- +functional +- +mutex +- +opencv2/core/utils/instrumentation.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/utils/instrumentation.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/instrumentation.hpp +opencv2/core/utility.hpp +- +opencv2/core/utils/tls.hpp +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/tls.hpp +opencv2/core/utility.hpp +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/version.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/vsx_utils.hpp +opencv2/core/cvdef.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/opencv2/core/cvdef.h +assert.h +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn.hpp +opencv2/dnn/dnn.hpp +- + 
+/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dict.hpp +opencv2/core.hpp +- +map +- +ostream +- +opencv2/dnn/dnn.hpp +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.hpp +vector +- +opencv2/core.hpp +- +opencv2/core/async.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/opencv2/core/async.hpp +../dnn/version.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/version.hpp +opencv2/dnn/dict.hpp +- +opencv2/dnn/layer.hpp +- +opencv2/dnn/dnn.inl.hpp +- +opencv2/dnn/utils/inference_engine.hpp +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.inl.hpp +opencv2/dnn.hpp +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/layer.hpp +opencv2/dnn.hpp +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/utils/inference_engine.hpp +../dnn.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/version.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/features2d.hpp +opencv2/opencv_modules.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/opencv_modules.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core.hpp +opencv2/flann/miniflann.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/flann/miniflann.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core.hpp +opencv2/flann/miniflann.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/flann/miniflann.hpp +opencv2/flann/flann_base.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/flann/flann_base.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/all_indices.h +general.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +nn_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h +kdtree_index.h 
+/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_index.h +kdtree_single_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_single_index.h +kmeans_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kmeans_index.h +composite_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/composite_index.h +linear_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/linear_index.h +hierarchical_clustering_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/hierarchical_clustering_index.h +lsh_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_index.h +autotuned_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/autotuned_index.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h +stdlib.h +- +stdio.h +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/any.h +defines.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/defines.h +stdexcept +- +ostream +- +typeinfo +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/autotuned_index.h +sstream +- +general.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +nn_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h +ground_truth.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/ground_truth.h +index_testing.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/index_testing.h +sampling.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/sampling.h +kdtree_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_index.h +kdtree_single_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_single_index.h +kmeans_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kmeans_index.h +composite_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/composite_index.h +linear_index.h 
+/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/linear_index.h +logger.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/logger.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/composite_index.h +general.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +nn_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h +kdtree_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_index.h +kmeans_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kmeans_index.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/config.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/defines.h +config.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/config.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dist.h +cmath +- +cstdlib +- +string.h +- +stdint.h +- +defines.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/defines.h +Intrin.h +- +arm_neon.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/arm_neon.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dynamic_bitset.h +boost/dynamic_bitset.hpp +- +limits.h +- +dist.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dist.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/flann_base.hpp +vector +- +cassert +- +cstdio +- +general.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +matrix.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h +params.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/params.h +saving.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h +all_indices.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/all_indices.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/opencv2/core.hpp + 
+/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/ground_truth.h +dist.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dist.h +matrix.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h +algorithm +- +vector +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/hierarchical_clustering_index.h +algorithm +- +map +- +cassert +- +limits +- +cmath +- +general.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +nn_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h +dist.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dist.h +matrix.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h +result_set.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h +heap.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h +allocator.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h +random.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h +saving.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/index_testing.h +cstring +- +cassert +- +cmath +- +matrix.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h +nn_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h +result_set.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h +logger.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/logger.h +timer.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/timer.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_index.h +algorithm +- +map +- +cassert +- +cstring +- +general.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +nn_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h +dynamic_bitset.h 
+/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dynamic_bitset.h +matrix.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h +result_set.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h +heap.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h +allocator.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h +random.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h +saving.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_single_index.h +algorithm +- +map +- +cassert +- +cstring +- +general.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +nn_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h +matrix.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h +result_set.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h +heap.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h +allocator.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h +random.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h +saving.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kmeans_index.h +algorithm +- +map +- +cassert +- +limits +- +cmath +- +general.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +nn_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h +dist.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dist.h +matrix.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h +result_set.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h +heap.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h +allocator.h 
+/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h +random.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h +saving.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h +logger.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/logger.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/linear_index.h +general.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +nn_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/logger.h +stdio.h +- +stdarg.h +- +defines.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/defines.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_index.h +algorithm +- +cassert +- +cstring +- +map +- +vector +- +general.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +nn_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h +matrix.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h +result_set.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h +heap.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h +lsh_table.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_table.h +allocator.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h +random.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h +saving.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_table.h +algorithm +- +iostream +- +iomanip +- +limits.h +- +unordered_map +- +map +- +math.h +- +stddef.h +- +dynamic_bitset.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dynamic_bitset.h +matrix.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h +stdio.h 
+- +general.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/miniflann.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/opencv2/core.hpp +opencv2/flann/defines.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/opencv2/flann/defines.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h +general.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +matrix.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h +result_set.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h +params.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/params.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/params.h +any.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/any.h +general.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +iostream +- +map +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h +algorithm +- +cstdlib +- +vector +- +general.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h +algorithm +- +cstring +- +iostream +- +limits +- +set +- +vector +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/sampling.h +matrix.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h +random.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h +cstring +- +vector +- +general.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +nn_index.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/timer.h +time.h +- +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/opencv2/core.hpp 
+opencv2/core/utility.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/opencv2/core/utility.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/highgui.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core.hpp +opencv2/imgcodecs.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/imgcodecs.hpp +opencv2/videoio.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/videoio.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgcodecs.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc/types_c.h +opencv2/core/core_c.h +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc/opencv2/core/core_c.h + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core.hpp +float.h +- +map +- +iostream +- +opencv2/ml/ml.inl.hpp +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml/ml.inl.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core.hpp +opencv2/objdetect/detection_based_tracker.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/objdetect/detection_based_tracker.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect/detection_based_tracker.hpp +opencv2/core.hpp +- +vector +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv.hpp +opencv2/opencv_modules.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/opencv_modules.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core.hpp +opencv2/calib3d.hpp 
+/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/calib3d.hpp +opencv2/features2d.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/features2d.hpp +opencv2/dnn.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/dnn.hpp +opencv2/flann.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/flann.hpp +opencv2/highgui.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/highgui.hpp +opencv2/imgcodecs.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/imgcodecs.hpp +opencv2/imgproc.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/imgproc.hpp +opencv2/ml.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/ml.hpp +opencv2/objdetect.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/objdetect.hpp +opencv2/photo.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/photo.hpp +opencv2/stitching.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/stitching.hpp +opencv2/video.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/video.hpp +opencv2/videoio.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/videoio.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv_modules.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/photo.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core.hpp +opencv2/imgproc.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/imgproc.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core.hpp +opencv2/features2d.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/features2d.hpp +opencv2/stitching/warpers.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/stitching/warpers.hpp +opencv2/stitching/detail/matchers.hpp 
+/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/stitching/detail/matchers.hpp +opencv2/stitching/detail/motion_estimators.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/stitching/detail/motion_estimators.hpp +opencv2/stitching/detail/exposure_compensate.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/stitching/detail/exposure_compensate.hpp +opencv2/stitching/detail/seam_finders.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/stitching/detail/seam_finders.hpp +opencv2/stitching/detail/blenders.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/stitching/detail/blenders.hpp +opencv2/stitching/detail/camera.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/stitching/detail/camera.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/blenders.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/opencv2/core.hpp +opencv2/core/cuda.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/opencv2/core/cuda.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/camera.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/opencv2/core.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/exposure_compensate.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/opencv2/core.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/matchers.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/opencv2/core.hpp +opencv2/features2d.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/opencv2/features2d.hpp +opencv2/opencv_modules.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/opencv2/opencv_modules.hpp + 
+/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/motion_estimators.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/opencv2/core.hpp +matchers.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/matchers.hpp +util.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util.hpp +camera.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/camera.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/seam_finders.hpp +set +- +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/opencv2/core.hpp +opencv2/opencv_modules.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/opencv2/opencv_modules.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util.hpp +list +- +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/opencv2/core.hpp +util_inl.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util_inl.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util_inl.hpp +queue +- +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/opencv2/core.hpp +util.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/opencv2/core.hpp +opencv2/core/cuda.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/opencv2/core/cuda.hpp +opencv2/imgproc.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/opencv2/imgproc.hpp +opencv2/opencv_modules.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/opencv2/opencv_modules.hpp +warpers_inl.hpp 
+/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers_inl.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers_inl.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/opencv2/core.hpp +warpers.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers.hpp +limits +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/warpers.hpp +opencv2/stitching/detail/warpers.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/opencv2/stitching/detail/warpers.hpp +string +- + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video.hpp +opencv2/video/tracking.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/video/tracking.hpp +opencv2/video/background_segm.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/video/background_segm.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/background_segm.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/opencv2/core.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/tracking.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/opencv2/core.hpp +opencv2/imgproc.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/opencv2/imgproc.hpp + +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/videoio.hpp +opencv2/core.hpp +/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv2/core.hpp + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_detection.h +atlasutil/atlas_model.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/atlasutil/atlas_model.h +atlasutil/dvpp_process.h 
+/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/atlasutil/dvpp_process.h +face_recognition_params.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h +facial_thread_base.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_thread_base.h + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_mask.h +face_recognition_params.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h +iostream +- +string +- +dirent.h +- +memory +- +unistd.h +- +vector +- +stdint.h +- +opencv2/opencv.hpp +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/opencv2/opencv.hpp +opencv2/imgproc.hpp +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/opencv2/imgproc.hpp +opencv2/core/types_c.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/opencv2/core/types_c.h +atlasutil/atlas_model.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/atlasutil/atlas_model.h +atlasutil/dvpp_process.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/atlasutil/dvpp_process.h +facial_thread_base.h 
+/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_thread_base.h + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_mean.h + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_std.h + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_post_process.h +face_recognition_params.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h +vector +- +stdint.h +- +atlasutil/atlas_thread.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/atlasutil/atlas_thread.h +atlasutil/atlas_app.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/atlasutil/atlas_app.h +atlasutil/dvpp_process.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/atlasutil/dvpp_process.h +facial_recognition_message.pb.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_recognition_message.pb.h +ascenddk/presenter/agent/presenter_channel.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/ascenddk/presenter/agent/presenter_channel.h +presenter_channels.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/presenter_channels.h + 
+/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition.h +vector +- +atlasutil/atlas_model.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/atlasutil/atlas_model.h +atlasutil/dvpp_process.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/atlasutil/dvpp_process.h +face_recognition_params.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h +facial_thread_base.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_thread_base.h + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h +iostream +- +mutex +- +unistd.h +- +face_feature_train_mean.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_mean.h +face_feature_train_std.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_std.h +atlasutil/atlas_utils.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/atlasutil/atlas_utils.h +opencv2/opencv.hpp +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/opencv2/opencv.hpp +opencv2/opencv.hpp +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/opencv2/opencv.hpp 
+opencv2/imgproc.hpp +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/opencv2/imgproc.hpp +opencv2/core/types_c.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/opencv2/core/types_c.h +opencv2/imgproc/types_c.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/opencv2/imgproc/types_c.h +opencv2/core/core.hpp +- + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_register.h +iostream +- +string +- +dirent.h +- +memory +- +unistd.h +- +vector +- +stdint.h +- +atlasutil/dvpp_process.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/atlasutil/dvpp_process.h +face_recognition_params.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h +facial_thread_base.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_thread_base.h +presenter_channels.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/presenter_channels.h + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_recognition_message.pb.h +limits +- +string +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- +google/protobuf/io/coded_stream.h +- +google/protobuf/arena.h +- +google/protobuf/arenastring.h +- +google/protobuf/generated_message_table_driven.h +- +google/protobuf/generated_message_util.h +- 
+google/protobuf/inlined_string_field.h +- +google/protobuf/metadata.h +- +google/protobuf/generated_message_reflection.h +- +google/protobuf/message.h +- +google/protobuf/repeated_field.h +- +google/protobuf/extension_set.h +- +google/protobuf/generated_enum_reflection.h +- +google/protobuf/unknown_field_set.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_thread_base.h +iostream +- +string +- +vector +- +stdint.h +- +stdio.h +- +atlasutil/atlas_thread.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/atlasutil/atlas_thread.h +atlasutil/atlas_app.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/atlasutil/atlas_app.h +atlasutil/atlas_utils.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/atlasutil/atlas_utils.h +atlasutil/parse_config.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/atlasutil/parse_config.h +face_recognition_params.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/mind_camera.h +string +- +memory +- +atlasutil/atlas_videocapture.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/atlasutil/atlas_videocapture.h +face_recognition_params.h 
+/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h +facial_thread_base.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_thread_base.h + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/presenter_channels.h +facial_recognition_message.pb.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_recognition_message.pb.h +mutex +- +string +- +cstdint +- +fstream +- +iostream +- +ascenddk/presenter/agent/presenter_types.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/ascenddk/presenter/agent/presenter_types.h +ascenddk/presenter/agent/channel.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/ascenddk/presenter/agent/channel.h +ascenddk/presenter/agent/presenter_channel.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/ascenddk/presenter/agent/presenter_channel.h + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_detection.cpp +vector +- +sstream +- +unistd.h +- +fstream +- +face_detection.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_detection.h + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_feature_mask.cpp +memory +- +fstream +- +sstream +- +stdio.h +- +string.h +- +cstring +- +opencv2/opencv.hpp 
+/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/opencv2/opencv.hpp +opencv2/imgproc.hpp +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/opencv2/imgproc.hpp +opencv2/core/types_c.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/opencv2/core/types_c.h +opencv2/imgproc/types_c.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/opencv2/imgproc/types_c.h +face_feature_mask.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_feature_mask.h +face_feature_train_mean.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_feature_train_mean.h +face_feature_train_std.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_feature_train_std.h + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_post_process.cpp +face_post_process.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_post_process.h +memory +- +fstream +- +sstream +- +cstdio +- +cstring +- +string +- +atlasutil/parse_config.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/atlasutil/parse_config.h + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_recognition.cpp +face_recognition.h 
+/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_recognition.h +cstdint +- +unistd.h +- +memory +- +sstream +- +opencv2/opencv.hpp +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/opencv2/opencv.hpp +opencv2/imgproc.hpp +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/opencv2/imgproc.hpp +opencv2/video.hpp +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/opencv2/video.hpp +opencv2/video/tracking.hpp +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/opencv2/video/tracking.hpp +opencv2/core/types_c.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/opencv2/core/types_c.h +opencv2/imgproc/types_c.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/opencv2/imgproc/types_c.h + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_register.cpp +face_register.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_register.h +memory +- +fstream +- +sstream +- +stdio.h +- +string.h +- +cmath +- +regex +- +iostream +- +opencv2/opencv.hpp +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/opencv2/opencv.hpp +opencv2/imgproc.hpp +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/opencv2/imgproc.hpp +opencv2/core/types_c.h 
+/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/opencv2/core/types_c.h +opencv2/imgproc/types_c.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/opencv2/imgproc/types_c.h +opencv2/core/core.hpp +- +face_recognition_params.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_recognition_params.h + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/facial_recognition_message.pb.cc +facial_recognition_message.pb.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/facial_recognition_message.pb.h +algorithm +- +google/protobuf/stubs/common.h +- +google/protobuf/io/coded_stream.h +- +google/protobuf/extension_set.h +- +google/protobuf/wire_format_lite.h +- +google/protobuf/descriptor.h +- +google/protobuf/generated_message_reflection.h +- +google/protobuf/reflection_ops.h +- +google/protobuf/wire_format.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/main.cpp +iostream +- +stdlib.h +- +dirent.h +- +thread +- +time.h +- +fstream +- +atlasutil/acl_device.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/atlasutil/acl_device.h +atlasutil/atlas_app.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/atlasutil/atlas_app.h +atlasutil/atlas_thread.h 
+/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/atlasutil/atlas_thread.h +atlasutil/atlas_type.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/atlasutil/atlas_type.h +atlasutil/atlas_error.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/atlasutil/atlas_error.h +face_detection.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_detection.h +face_feature_mask.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_feature_mask.h +face_recognition.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_recognition.h +face_post_process.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_post_process.h +face_register.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_register.h +mind_camera.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/mind_camera.h + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/mind_camera.cpp +mind_camera.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/mind_camera.h +memory +- +iostream +- +fstream +- +sstream +- +cstdio +- +stdlib.h +- +string.h +- +time.h +- +cstring +- +chrono +- +atlasutil/parse_config.h 
+/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/atlasutil/parse_config.h +atlasutil/atlas_app.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/atlasutil/atlas_app.h +driver/peripheral_api.h +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/driver/peripheral_api.h + +/usr/local/include/google/protobuf/arena.h +limits +- +type_traits +- +utility +- +exception +- +typeinfo +- +typeinfo +- +google/protobuf/arena_impl.h +- +google/protobuf/port.h +- +type_traits +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/arena_impl.h +atomic +- +limits +- +google/protobuf/stubs/common.h +- +google/protobuf/stubs/logging.h +- +sanitizer/asan_interface.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/arenastring.h +string +- +google/protobuf/arena.h +- +google/protobuf/stubs/common.h +- +google/protobuf/stubs/fastmem.h +- +google/protobuf/stubs/logging.h +- +google/protobuf/stubs/port.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/descriptor.h +memory +- +set +- +string +- +vector +- +google/protobuf/stubs/common.h +- +google/protobuf/stubs/mutex.h +- +google/protobuf/stubs/once.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/extension_set.h +algorithm +- +cassert +- +map +- +string +- +utility +- +vector +- +google/protobuf/stubs/common.h +- +google/protobuf/stubs/logging.h +- +google/protobuf/parse_context.h +- +google/protobuf/io/coded_stream.h +- +google/protobuf/port.h +- +google/protobuf/repeated_field.h +- +google/protobuf/wire_format_lite.h +- +google/protobuf/port_def.inc +- 
+google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/generated_enum_reflection.h +string +- +google/protobuf/generated_enum_util.h +- +google/protobuf/port.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/generated_enum_util.h +type_traits +- + +/usr/local/include/google/protobuf/generated_message_reflection.h +string +- +vector +- +google/protobuf/stubs/casts.h +- +google/protobuf/stubs/common.h +- +google/protobuf/generated_enum_reflection.h +- +google/protobuf/message.h +- +google/protobuf/metadata.h +- +google/protobuf/stubs/once.h +- +google/protobuf/port.h +- +google/protobuf/unknown_field_set.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/generated_message_table_driven.h +google/protobuf/map.h +- +google/protobuf/map_entry_lite.h +- +google/protobuf/map_field_lite.h +- +google/protobuf/message_lite.h +- +google/protobuf/wire_format_lite.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/generated_message_util.h +assert.h +- +atomic +- +climits +- +string +- +vector +- +google/protobuf/stubs/common.h +- +google/protobuf/has_bits.h +- +google/protobuf/implicit_weak_message.h +- +google/protobuf/message_lite.h +- +google/protobuf/stubs/once.h +- +google/protobuf/port.h +- +google/protobuf/wire_format_lite.h +- +google/protobuf/stubs/strutil.h +- +google/protobuf/stubs/casts.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/has_bits.h +google/protobuf/stubs/common.h +- +google/protobuf/port.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/implicit_weak_message.h +string +- +google/protobuf/io/coded_stream.h +- +google/protobuf/arena.h +- +google/protobuf/message_lite.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- 
+ +/usr/local/include/google/protobuf/inlined_string_field.h +string +- +utility +- +google/protobuf/port.h +- +google/protobuf/stubs/stringpiece.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/io/coded_stream.h +assert.h +- +atomic +- +climits +- +string +- +utility +- +sys/param.h +- +google/protobuf/stubs/common.h +- +google/protobuf/port.h +- +google/protobuf/stubs/port.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/io/zero_copy_stream.h +string +- +google/protobuf/stubs/common.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/map.h +initializer_list +- +iterator +- +limits +- +set +- +utility +- +google/protobuf/stubs/common.h +- +google/protobuf/arena.h +- +google/protobuf/generated_enum_util.h +- +google/protobuf/map_type_handler.h +- +google/protobuf/stubs/hash.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/map_entry_lite.h +assert.h +- +string +- +google/protobuf/stubs/casts.h +- +google/protobuf/parse_context.h +- +google/protobuf/io/coded_stream.h +- +google/protobuf/arena.h +- +google/protobuf/arenastring.h +- +google/protobuf/generated_message_util.h +- +google/protobuf/map.h +- +google/protobuf/map_type_handler.h +- +google/protobuf/port.h +- +google/protobuf/wire_format_lite.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/map_field_lite.h +type_traits +- +google/protobuf/parse_context.h +- +google/protobuf/io/coded_stream.h +- +google/protobuf/map.h +- +google/protobuf/map_entry_lite.h +- +google/protobuf/port.h +- +google/protobuf/wire_format_lite.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/map_type_handler.h +google/protobuf/parse_context.h +- 
+google/protobuf/io/coded_stream.h +- +google/protobuf/arena.h +- +google/protobuf/wire_format_lite.h +- + +/usr/local/include/google/protobuf/message.h +iosfwd +- +string +- +type_traits +- +vector +- +google/protobuf/stubs/casts.h +- +google/protobuf/stubs/common.h +- +google/protobuf/arena.h +- +google/protobuf/descriptor.h +- +google/protobuf/message_lite.h +- +google/protobuf/port.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/message_lite.h +climits +- +string +- +google/protobuf/stubs/common.h +- +google/protobuf/stubs/logging.h +- +google/protobuf/arena.h +- +google/protobuf/stubs/once.h +- +google/protobuf/port.h +- +google/protobuf/stubs/strutil.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/metadata.h +google/protobuf/metadata_lite.h +- +google/protobuf/unknown_field_set.h +- + +/usr/local/include/google/protobuf/metadata_lite.h +string +- +google/protobuf/stubs/common.h +- +google/protobuf/arena.h +- +google/protobuf/generated_message_util.h +- +google/protobuf/message_lite.h +- +google/protobuf/port.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/parse_context.h +cstring +- +string +- +google/protobuf/io/coded_stream.h +- +google/protobuf/io/zero_copy_stream.h +- +google/protobuf/arenastring.h +- +google/protobuf/implicit_weak_message.h +- +google/protobuf/metadata_lite.h +- +google/protobuf/port.h +- +google/protobuf/repeated_field.h +- +google/protobuf/wire_format_lite.h +- +google/protobuf/stubs/strutil.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/port.h +google/protobuf/stubs/port.h +- + +/usr/local/include/google/protobuf/port_def.inc + +/usr/local/include/google/protobuf/port_undef.inc + +/usr/local/include/google/protobuf/reflection_ops.h +google/protobuf/stubs/common.h +- 
+google/protobuf/message.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/repeated_field.h +utility +- +algorithm +- +iterator +- +limits +- +string +- +type_traits +- +google/protobuf/stubs/logging.h +- +google/protobuf/stubs/common.h +- +google/protobuf/arena.h +- +google/protobuf/implicit_weak_message.h +- +google/protobuf/message_lite.h +- +google/protobuf/port.h +- +google/protobuf/stubs/casts.h +- +type_traits +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/stubs/casts.h +type_traits +- +google/protobuf/stubs/common.h +- + +/usr/local/include/google/protobuf/stubs/common.h +algorithm +- +iostream +- +map +- +memory +- +set +- +string +- +vector +- +google/protobuf/stubs/port.h +- +google/protobuf/stubs/macros.h +- +google/protobuf/stubs/platform_macros.h +- +exception +- +TargetConditionals.h +- +pthread.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/stubs/fastmem.h +stddef.h +- +stdio.h +- +string.h +- +google/protobuf/stubs/common.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/stubs/hash.h +string.h +- +google/protobuf/stubs/common.h +- +unordered_map +- +unordered_set +- + +/usr/local/include/google/protobuf/stubs/logging.h +google/protobuf/stubs/macros.h +- +google/protobuf/stubs/port.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/stubs/macros.h +google/protobuf/stubs/port.h +- + +/usr/local/include/google/protobuf/stubs/mutex.h +mutex +- +windows.h +- +google/protobuf/stubs/macros.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/stubs/once.h +mutex +- +utility +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + 
+/usr/local/include/google/protobuf/stubs/platform_macros.h +Availability.h +- +TargetConditionals.h +- + +/usr/local/include/google/protobuf/stubs/port.h +assert.h +- +stdlib.h +- +cstddef +- +string +- +string.h +- +inttypes.h +- +stdint.h +- +google/protobuf/stubs/platform_macros.h +- +google/protobuf/port_def.inc +- +sys/param.h +- +endian.h +- +stdlib.h +- +intrin.h +- +libkern/OSByteOrder.h +- +byteswap.h +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/stubs/stringpiece.h +assert.h +- +stddef.h +- +string.h +- +iosfwd +- +limits +- +string +- +google/protobuf/stubs/common.h +- +google/protobuf/stubs/hash.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/stubs/strutil.h +stdlib.h +- +vector +- +google/protobuf/stubs/common.h +- +google/protobuf/stubs/stringpiece.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/unknown_field_set.h +assert.h +- +string +- +vector +- +google/protobuf/stubs/common.h +- +google/protobuf/stubs/logging.h +- +google/protobuf/parse_context.h +- +google/protobuf/io/coded_stream.h +- +google/protobuf/message_lite.h +- +google/protobuf/port.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/wire_format.h +string +- +google/protobuf/stubs/common.h +- +google/protobuf/descriptor.h +- +google/protobuf/message.h +- +google/protobuf/wire_format_lite.h +- +google/protobuf/stubs/casts.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + +/usr/local/include/google/protobuf/wire_format_lite.h +string +- +google/protobuf/stubs/common.h +- +google/protobuf/stubs/logging.h +- +google/protobuf/io/coded_stream.h +- +google/protobuf/arenastring.h +- +google/protobuf/message_lite.h +- +google/protobuf/port.h +- +google/protobuf/repeated_field.h +- +google/protobuf/port_def.inc +- +google/protobuf/port_undef.inc +- + diff 
--git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/DependInfo.cmake b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/DependInfo.cmake new file mode 100644 index 0000000000000000000000000000000000000000..f28bfab2f28e8931e430afc0c10aab0845158dc7 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/DependInfo.cmake @@ -0,0 +1,41 @@ +# The set of languages for which implicit dependencies are needed: +set(CMAKE_DEPENDS_LANGUAGES + "CXX" + ) +# The set of files for implicit dependencies of each language: +set(CMAKE_DEPENDS_CHECK_CXX + "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_detection.cpp" "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_detection.cpp.o" + "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_feature_mask.cpp" "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_feature_mask.cpp.o" + "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_post_process.cpp" "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_post_process.cpp.o" + 
"/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_recognition.cpp" "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_recognition.cpp.o" + "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_register.cpp" "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_register.cpp.o" + "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/facial_recognition_message.pb.cc" "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/facial_recognition_message.pb.cc.o" + "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/main.cpp" "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/main.cpp.o" + "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/mind_camera.cpp" "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/mind_camera.cpp.o" + ) +set(CMAKE_CXX_COMPILER_ID "GNU") + +# Preprocessor definitions for this target. 
+set(CMAKE_TARGET_DEFINITIONS_CXX + "ENABLE_BOARD_CAMARE" + "ENABLE_DVPP_INTERFACE" + ) + +# The include file search paths: +set(CMAKE_CXX_TARGET_INCLUDE_PATH + "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc" + "/home/ascend/ascend_ddk/arm/include/opencv4" + "/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include" + "/home/ascend/ascend_ddk/arm/include" + "/home/ascend/ascend_ddk/arm/include/ascenddk" + "/home/ascend/Ascend" + "/home/ascend/Ascend/ascenddk" + "/usr/local/include" + ) + +# Targets to which this target links. +set(CMAKE_TARGET_LINKED_INFO_FILES + ) + +# Fortran module output directory. +set(CMAKE_Fortran_TARGET_MODULE_DIR "") diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/build.make b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/build.make new file mode 100644 index 0000000000000000000000000000000000000000..81ff78a904a490fd0bb203fdb6df13fb6eb7fc4a --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/build.make @@ -0,0 +1,302 @@ +# CMAKE generated file: DO NOT EDIT! +# Generated by "Unix Makefiles" Generator, CMake Version 3.10 + +# Delete rule output on recipe failure. +.DELETE_ON_ERROR: + + +#============================================================================= +# Special targets provided by cmake. + +# Disable implicit rules so canonical targets will work. +.SUFFIXES: + + +# Remove some rules from gmake that .SUFFIXES does not remove. +SUFFIXES = + +.SUFFIXES: .hpux_make_needs_suffix_list + + +# Suppress display of executed commands. +$(VERBOSE).SILENT: + + +# A target that is always out of date. 
+cmake_force: + +.PHONY : cmake_force + +#============================================================================= +# Set environment variables for the build. + +# The shell in which to execute make rules. +SHELL = /bin/sh + +# The CMake executable. +CMAKE_COMMAND = /usr/bin/cmake + +# The command to remove a file. +RM = /usr/bin/cmake -E remove -f + +# Escaping for special characters. +EQUALS = = + +# The top-level source directory on which CMake was run. +CMAKE_SOURCE_DIR = /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src + +# The top-level build directory on which CMake was run. +CMAKE_BINARY_DIR = /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host + +# Include any dependencies generated for this target. +include CMakeFiles/main.dir/depend.make + +# Include the progress variables for this target. +include CMakeFiles/main.dir/progress.make + +# Include the compile flags for this target's objects. 
+include CMakeFiles/main.dir/flags.make + +CMakeFiles/main.dir/mind_camera.cpp.o: CMakeFiles/main.dir/flags.make +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/mind_camera.cpp + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green --progress-dir=/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles --progress-num=$(CMAKE_PROGRESS_1) "Building CXX object CMakeFiles/main.dir/mind_camera.cpp.o" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -o CMakeFiles/main.dir/mind_camera.cpp.o -c /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/mind_camera.cpp + +CMakeFiles/main.dir/mind_camera.cpp.i: cmake_force + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green "Preprocessing CXX source to CMakeFiles/main.dir/mind_camera.cpp.i" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -E /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/mind_camera.cpp > CMakeFiles/main.dir/mind_camera.cpp.i + +CMakeFiles/main.dir/mind_camera.cpp.s: cmake_force + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green "Compiling CXX source to assembly CMakeFiles/main.dir/mind_camera.cpp.s" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -S /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/mind_camera.cpp -o CMakeFiles/main.dir/mind_camera.cpp.s + +CMakeFiles/main.dir/mind_camera.cpp.o.requires: + +.PHONY : CMakeFiles/main.dir/mind_camera.cpp.o.requires + +CMakeFiles/main.dir/mind_camera.cpp.o.provides: 
CMakeFiles/main.dir/mind_camera.cpp.o.requires + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/mind_camera.cpp.o.provides.build +.PHONY : CMakeFiles/main.dir/mind_camera.cpp.o.provides + +CMakeFiles/main.dir/mind_camera.cpp.o.provides.build: CMakeFiles/main.dir/mind_camera.cpp.o + + +CMakeFiles/main.dir/face_register.cpp.o: CMakeFiles/main.dir/flags.make +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_register.cpp + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green --progress-dir=/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles --progress-num=$(CMAKE_PROGRESS_2) "Building CXX object CMakeFiles/main.dir/face_register.cpp.o" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -o CMakeFiles/main.dir/face_register.cpp.o -c /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_register.cpp + +CMakeFiles/main.dir/face_register.cpp.i: cmake_force + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green "Preprocessing CXX source to CMakeFiles/main.dir/face_register.cpp.i" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -E /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_register.cpp > CMakeFiles/main.dir/face_register.cpp.i + +CMakeFiles/main.dir/face_register.cpp.s: cmake_force + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green "Compiling CXX source to assembly CMakeFiles/main.dir/face_register.cpp.s" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -S 
/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_register.cpp -o CMakeFiles/main.dir/face_register.cpp.s + +CMakeFiles/main.dir/face_register.cpp.o.requires: + +.PHONY : CMakeFiles/main.dir/face_register.cpp.o.requires + +CMakeFiles/main.dir/face_register.cpp.o.provides: CMakeFiles/main.dir/face_register.cpp.o.requires + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_register.cpp.o.provides.build +.PHONY : CMakeFiles/main.dir/face_register.cpp.o.provides + +CMakeFiles/main.dir/face_register.cpp.o.provides.build: CMakeFiles/main.dir/face_register.cpp.o + + +CMakeFiles/main.dir/face_detection.cpp.o: CMakeFiles/main.dir/flags.make +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_detection.cpp + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green --progress-dir=/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles --progress-num=$(CMAKE_PROGRESS_3) "Building CXX object CMakeFiles/main.dir/face_detection.cpp.o" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -o CMakeFiles/main.dir/face_detection.cpp.o -c /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_detection.cpp + +CMakeFiles/main.dir/face_detection.cpp.i: cmake_force + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green "Preprocessing CXX source to CMakeFiles/main.dir/face_detection.cpp.i" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -E /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_detection.cpp > CMakeFiles/main.dir/face_detection.cpp.i + 
+CMakeFiles/main.dir/face_detection.cpp.s: cmake_force + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green "Compiling CXX source to assembly CMakeFiles/main.dir/face_detection.cpp.s" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -S /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_detection.cpp -o CMakeFiles/main.dir/face_detection.cpp.s + +CMakeFiles/main.dir/face_detection.cpp.o.requires: + +.PHONY : CMakeFiles/main.dir/face_detection.cpp.o.requires + +CMakeFiles/main.dir/face_detection.cpp.o.provides: CMakeFiles/main.dir/face_detection.cpp.o.requires + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_detection.cpp.o.provides.build +.PHONY : CMakeFiles/main.dir/face_detection.cpp.o.provides + +CMakeFiles/main.dir/face_detection.cpp.o.provides.build: CMakeFiles/main.dir/face_detection.cpp.o + + +CMakeFiles/main.dir/face_feature_mask.cpp.o: CMakeFiles/main.dir/flags.make +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_feature_mask.cpp + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green --progress-dir=/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles --progress-num=$(CMAKE_PROGRESS_4) "Building CXX object CMakeFiles/main.dir/face_feature_mask.cpp.o" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -o CMakeFiles/main.dir/face_feature_mask.cpp.o -c /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_feature_mask.cpp + +CMakeFiles/main.dir/face_feature_mask.cpp.i: cmake_force + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green "Preprocessing CXX source to 
CMakeFiles/main.dir/face_feature_mask.cpp.i" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -E /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_feature_mask.cpp > CMakeFiles/main.dir/face_feature_mask.cpp.i + +CMakeFiles/main.dir/face_feature_mask.cpp.s: cmake_force + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green "Compiling CXX source to assembly CMakeFiles/main.dir/face_feature_mask.cpp.s" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -S /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_feature_mask.cpp -o CMakeFiles/main.dir/face_feature_mask.cpp.s + +CMakeFiles/main.dir/face_feature_mask.cpp.o.requires: + +.PHONY : CMakeFiles/main.dir/face_feature_mask.cpp.o.requires + +CMakeFiles/main.dir/face_feature_mask.cpp.o.provides: CMakeFiles/main.dir/face_feature_mask.cpp.o.requires + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_feature_mask.cpp.o.provides.build +.PHONY : CMakeFiles/main.dir/face_feature_mask.cpp.o.provides + +CMakeFiles/main.dir/face_feature_mask.cpp.o.provides.build: CMakeFiles/main.dir/face_feature_mask.cpp.o + + +CMakeFiles/main.dir/face_recognition.cpp.o: CMakeFiles/main.dir/flags.make +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_recognition.cpp + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green --progress-dir=/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles --progress-num=$(CMAKE_PROGRESS_5) "Building CXX object CMakeFiles/main.dir/face_recognition.cpp.o" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -o 
CMakeFiles/main.dir/face_recognition.cpp.o -c /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_recognition.cpp + +CMakeFiles/main.dir/face_recognition.cpp.i: cmake_force + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green "Preprocessing CXX source to CMakeFiles/main.dir/face_recognition.cpp.i" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -E /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_recognition.cpp > CMakeFiles/main.dir/face_recognition.cpp.i + +CMakeFiles/main.dir/face_recognition.cpp.s: cmake_force + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green "Compiling CXX source to assembly CMakeFiles/main.dir/face_recognition.cpp.s" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -S /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_recognition.cpp -o CMakeFiles/main.dir/face_recognition.cpp.s + +CMakeFiles/main.dir/face_recognition.cpp.o.requires: + +.PHONY : CMakeFiles/main.dir/face_recognition.cpp.o.requires + +CMakeFiles/main.dir/face_recognition.cpp.o.provides: CMakeFiles/main.dir/face_recognition.cpp.o.requires + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_recognition.cpp.o.provides.build +.PHONY : CMakeFiles/main.dir/face_recognition.cpp.o.provides + +CMakeFiles/main.dir/face_recognition.cpp.o.provides.build: CMakeFiles/main.dir/face_recognition.cpp.o + + +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: CMakeFiles/main.dir/flags.make +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/facial_recognition_message.pb.cc + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) 
--green --progress-dir=/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles --progress-num=$(CMAKE_PROGRESS_6) "Building CXX object CMakeFiles/main.dir/facial_recognition_message.pb.cc.o" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -o CMakeFiles/main.dir/facial_recognition_message.pb.cc.o -c /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/facial_recognition_message.pb.cc + +CMakeFiles/main.dir/facial_recognition_message.pb.cc.i: cmake_force + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green "Preprocessing CXX source to CMakeFiles/main.dir/facial_recognition_message.pb.cc.i" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -E /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/facial_recognition_message.pb.cc > CMakeFiles/main.dir/facial_recognition_message.pb.cc.i + +CMakeFiles/main.dir/facial_recognition_message.pb.cc.s: cmake_force + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green "Compiling CXX source to assembly CMakeFiles/main.dir/facial_recognition_message.pb.cc.s" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -S /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/facial_recognition_message.pb.cc -o CMakeFiles/main.dir/facial_recognition_message.pb.cc.s + +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o.requires: + +.PHONY : CMakeFiles/main.dir/facial_recognition_message.pb.cc.o.requires + +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o.provides: CMakeFiles/main.dir/facial_recognition_message.pb.cc.o.requires + $(MAKE) -f CMakeFiles/main.dir/build.make 
CMakeFiles/main.dir/facial_recognition_message.pb.cc.o.provides.build +.PHONY : CMakeFiles/main.dir/facial_recognition_message.pb.cc.o.provides + +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o.provides.build: CMakeFiles/main.dir/facial_recognition_message.pb.cc.o + + +CMakeFiles/main.dir/face_post_process.cpp.o: CMakeFiles/main.dir/flags.make +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_post_process.cpp + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green --progress-dir=/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles --progress-num=$(CMAKE_PROGRESS_7) "Building CXX object CMakeFiles/main.dir/face_post_process.cpp.o" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -o CMakeFiles/main.dir/face_post_process.cpp.o -c /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_post_process.cpp + +CMakeFiles/main.dir/face_post_process.cpp.i: cmake_force + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green "Preprocessing CXX source to CMakeFiles/main.dir/face_post_process.cpp.i" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -E /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_post_process.cpp > CMakeFiles/main.dir/face_post_process.cpp.i + +CMakeFiles/main.dir/face_post_process.cpp.s: cmake_force + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green "Compiling CXX source to assembly CMakeFiles/main.dir/face_post_process.cpp.s" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -S 
/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_post_process.cpp -o CMakeFiles/main.dir/face_post_process.cpp.s + +CMakeFiles/main.dir/face_post_process.cpp.o.requires: + +.PHONY : CMakeFiles/main.dir/face_post_process.cpp.o.requires + +CMakeFiles/main.dir/face_post_process.cpp.o.provides: CMakeFiles/main.dir/face_post_process.cpp.o.requires + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_post_process.cpp.o.provides.build +.PHONY : CMakeFiles/main.dir/face_post_process.cpp.o.provides + +CMakeFiles/main.dir/face_post_process.cpp.o.provides.build: CMakeFiles/main.dir/face_post_process.cpp.o + + +CMakeFiles/main.dir/main.cpp.o: CMakeFiles/main.dir/flags.make +CMakeFiles/main.dir/main.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/main.cpp + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green --progress-dir=/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles --progress-num=$(CMAKE_PROGRESS_8) "Building CXX object CMakeFiles/main.dir/main.cpp.o" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -o CMakeFiles/main.dir/main.cpp.o -c /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/main.cpp + +CMakeFiles/main.dir/main.cpp.i: cmake_force + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green "Preprocessing CXX source to CMakeFiles/main.dir/main.cpp.i" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -E /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/main.cpp > CMakeFiles/main.dir/main.cpp.i + +CMakeFiles/main.dir/main.cpp.s: cmake_force + @$(CMAKE_COMMAND) -E 
cmake_echo_color --switch=$(COLOR) --green "Compiling CXX source to assembly CMakeFiles/main.dir/main.cpp.s" + /usr/bin/aarch64-linux-gnu-g++ $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -S /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/main.cpp -o CMakeFiles/main.dir/main.cpp.s + +CMakeFiles/main.dir/main.cpp.o.requires: + +.PHONY : CMakeFiles/main.dir/main.cpp.o.requires + +CMakeFiles/main.dir/main.cpp.o.provides: CMakeFiles/main.dir/main.cpp.o.requires + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/main.cpp.o.provides.build +.PHONY : CMakeFiles/main.dir/main.cpp.o.provides + +CMakeFiles/main.dir/main.cpp.o.provides.build: CMakeFiles/main.dir/main.cpp.o + + +# Object files for target main +main_OBJECTS = \ +"CMakeFiles/main.dir/mind_camera.cpp.o" \ +"CMakeFiles/main.dir/face_register.cpp.o" \ +"CMakeFiles/main.dir/face_detection.cpp.o" \ +"CMakeFiles/main.dir/face_feature_mask.cpp.o" \ +"CMakeFiles/main.dir/face_recognition.cpp.o" \ +"CMakeFiles/main.dir/facial_recognition_message.pb.cc.o" \ +"CMakeFiles/main.dir/face_post_process.cpp.o" \ +"CMakeFiles/main.dir/main.cpp.o" + +# External object files for target main +main_EXTERNAL_OBJECTS = + +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/main: CMakeFiles/main.dir/mind_camera.cpp.o +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/main: CMakeFiles/main.dir/face_register.cpp.o +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/main: CMakeFiles/main.dir/face_detection.cpp.o +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/main: CMakeFiles/main.dir/face_feature_mask.cpp.o 
+/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/main: CMakeFiles/main.dir/face_recognition.cpp.o +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/main: CMakeFiles/main.dir/facial_recognition_message.pb.cc.o +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/main: CMakeFiles/main.dir/face_post_process.cpp.o +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/main: CMakeFiles/main.dir/main.cpp.o +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/main: CMakeFiles/main.dir/build.make +/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/main: CMakeFiles/main.dir/link.txt + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --green --bold --progress-dir=/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles --progress-num=$(CMAKE_PROGRESS_9) "Linking CXX executable /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/main" + $(CMAKE_COMMAND) -E cmake_link_script CMakeFiles/main.dir/link.txt --verbose=$(VERBOSE) + +# Rule to build all files generated by this target. 
+CMakeFiles/main.dir/build: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/main + +.PHONY : CMakeFiles/main.dir/build + +CMakeFiles/main.dir/requires: CMakeFiles/main.dir/mind_camera.cpp.o.requires +CMakeFiles/main.dir/requires: CMakeFiles/main.dir/face_register.cpp.o.requires +CMakeFiles/main.dir/requires: CMakeFiles/main.dir/face_detection.cpp.o.requires +CMakeFiles/main.dir/requires: CMakeFiles/main.dir/face_feature_mask.cpp.o.requires +CMakeFiles/main.dir/requires: CMakeFiles/main.dir/face_recognition.cpp.o.requires +CMakeFiles/main.dir/requires: CMakeFiles/main.dir/facial_recognition_message.pb.cc.o.requires +CMakeFiles/main.dir/requires: CMakeFiles/main.dir/face_post_process.cpp.o.requires +CMakeFiles/main.dir/requires: CMakeFiles/main.dir/main.cpp.o.requires + +.PHONY : CMakeFiles/main.dir/requires + +CMakeFiles/main.dir/clean: + $(CMAKE_COMMAND) -P CMakeFiles/main.dir/cmake_clean.cmake +.PHONY : CMakeFiles/main.dir/clean + +CMakeFiles/main.dir/depend: + cd /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host && $(CMAKE_COMMAND) -E cmake_depends "Unix Makefiles" /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/DependInfo.cmake --color=$(COLOR) 
+.PHONY : CMakeFiles/main.dir/depend + diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/cmake_clean.cmake b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/cmake_clean.cmake new file mode 100644 index 0000000000000000000000000000000000000000..6b28e70ec277ce36f0405a6a45684609928e0d98 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/cmake_clean.cmake @@ -0,0 +1,17 @@ +file(REMOVE_RECURSE + "CMakeFiles/main.dir/mind_camera.cpp.o" + "CMakeFiles/main.dir/face_register.cpp.o" + "CMakeFiles/main.dir/face_detection.cpp.o" + "CMakeFiles/main.dir/face_feature_mask.cpp.o" + "CMakeFiles/main.dir/face_recognition.cpp.o" + "CMakeFiles/main.dir/facial_recognition_message.pb.cc.o" + "CMakeFiles/main.dir/face_post_process.cpp.o" + "CMakeFiles/main.dir/main.cpp.o" + "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/main.pdb" + "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/main" +) + +# Per-language clean rules from dependency scanning. 
+foreach(lang CXX) + include(CMakeFiles/main.dir/cmake_clean_${lang}.cmake OPTIONAL) +endforeach() diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/depend.internal b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/depend.internal new file mode 100644 index 0000000000000000000000000000000000000000..55f6c5e0f0c7dd1220cedaff27f107458425dd03 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/depend.internal @@ -0,0 +1,1096 @@ +# CMAKE generated file: DO NOT EDIT! +# Generated by "Unix Makefiles" Generator, CMake Version 3.10 + +CMakeFiles/main.dir/face_detection.cpp.o + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_base.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_mdl.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_op.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_rt.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/ops/acl_dvpp.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_app.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_error.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_model.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread_mgr.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_type.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_utils.h + /home/ascend/ascend_ddk/arm/include/atlasutil/dvpp_process.h + /home/ascend/ascend_ddk/arm/include/atlasutil/parse_config.h + 
/home/ascend/ascend_ddk/arm/include/atlasutil/thread_safe_queue.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/calib3d.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/affine.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/async.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/base.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/bufferpool.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/check.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda_types.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_dispatch.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_helper.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvdef.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd_wrapper.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/fast_math.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/interface.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/msa_macros.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/matx.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/neon_utils.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/operations.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/optim.hpp + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/ovx.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/persistence.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/saturate.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/traits.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utility.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/instrumentation.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/tls.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/version.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/vsx_utils.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dict.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/layer.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/utils/inference_engine.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/version.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/features2d.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/all_indices.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/any.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/autotuned_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/composite_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/config.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/defines.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dist.h + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dynamic_bitset.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/flann_base.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/ground_truth.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/hierarchical_clustering_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/index_testing.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_single_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kmeans_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/linear_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/logger.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_table.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/miniflann.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/params.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/sampling.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/timer.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/highgui.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgcodecs.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc/types_c.h + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml/ml.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect/detection_based_tracker.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv_modules.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/photo.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/blenders.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/camera.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/exposure_compensate.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/matchers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/motion_estimators.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/seam_finders.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util_inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers_inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/warpers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/background_segm.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/tracking.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/videoio.hpp + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_detection.h + 
/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_mean.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_std.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_thread_base.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_detection.cpp +CMakeFiles/main.dir/face_feature_mask.cpp.o + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_base.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_mdl.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_op.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_rt.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/ops/acl_dvpp.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_app.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_error.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_model.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread_mgr.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_type.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_utils.h + /home/ascend/ascend_ddk/arm/include/atlasutil/dvpp_process.h + /home/ascend/ascend_ddk/arm/include/atlasutil/parse_config.h + /home/ascend/ascend_ddk/arm/include/atlasutil/thread_safe_queue.h + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/calib3d.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/affine.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/async.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/base.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/bufferpool.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/check.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda_types.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_dispatch.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_helper.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvdef.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd_wrapper.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/fast_math.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/interface.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/msa_macros.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/matx.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/neon_utils.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/operations.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/optim.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/ovx.hpp + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/persistence.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/saturate.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/traits.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utility.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/instrumentation.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/tls.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/version.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/vsx_utils.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dict.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/layer.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/utils/inference_engine.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/version.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/features2d.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/all_indices.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/any.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/autotuned_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/composite_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/config.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/defines.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dist.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dynamic_bitset.h + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/flann_base.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/ground_truth.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/hierarchical_clustering_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/index_testing.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_single_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kmeans_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/linear_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/logger.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_table.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/miniflann.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/params.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/sampling.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/timer.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/highgui.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgcodecs.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc/types_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml.hpp + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml/ml.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect/detection_based_tracker.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv_modules.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/photo.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/blenders.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/camera.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/exposure_compensate.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/matchers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/motion_estimators.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/seam_finders.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util_inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers_inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/warpers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/background_segm.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/tracking.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/videoio.hpp + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_mask.h + 
/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_mean.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_std.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_thread_base.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_feature_mask.cpp +CMakeFiles/main.dir/face_post_process.cpp.o + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_base.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_mdl.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_op.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_rt.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/ops/acl_dvpp.h + /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/channel.h + /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/errors.h + /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/presenter_channel.h + /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/presenter_types.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_app.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_error.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread_mgr.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_type.h + 
/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_utils.h + /home/ascend/ascend_ddk/arm/include/atlasutil/dvpp_process.h + /home/ascend/ascend_ddk/arm/include/atlasutil/parse_config.h + /home/ascend/ascend_ddk/arm/include/atlasutil/thread_safe_queue.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/calib3d.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/affine.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/async.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/base.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/bufferpool.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/check.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda_types.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_dispatch.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_helper.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvdef.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd_wrapper.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/fast_math.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/interface.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/msa_macros.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/matx.hpp + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/neon_utils.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/operations.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/optim.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/ovx.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/persistence.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/saturate.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/traits.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utility.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/instrumentation.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/tls.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/version.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/vsx_utils.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dict.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/layer.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/utils/inference_engine.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/version.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/features2d.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/all_indices.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/any.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/autotuned_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/composite_index.h 
+ /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/config.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/defines.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dist.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dynamic_bitset.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/flann_base.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/ground_truth.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/hierarchical_clustering_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/index_testing.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_single_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kmeans_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/linear_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/logger.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_table.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/miniflann.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/params.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/sampling.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/timer.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/highgui.hpp + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgcodecs.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc/types_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml/ml.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect/detection_based_tracker.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv_modules.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/photo.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/blenders.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/camera.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/exposure_compensate.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/matchers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/motion_estimators.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/seam_finders.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util_inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers_inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/warpers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/background_segm.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/tracking.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/videoio.hpp + 
/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_mean.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_std.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_post_process.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_recognition_message.pb.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/presenter_channels.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_post_process.cpp + /usr/local/include/google/protobuf/arena.h + /usr/local/include/google/protobuf/arena_impl.h + /usr/local/include/google/protobuf/arenastring.h + /usr/local/include/google/protobuf/descriptor.h + /usr/local/include/google/protobuf/extension_set.h + /usr/local/include/google/protobuf/generated_enum_reflection.h + /usr/local/include/google/protobuf/generated_enum_util.h + /usr/local/include/google/protobuf/generated_message_reflection.h + /usr/local/include/google/protobuf/generated_message_table_driven.h + /usr/local/include/google/protobuf/generated_message_util.h + /usr/local/include/google/protobuf/has_bits.h + /usr/local/include/google/protobuf/implicit_weak_message.h + /usr/local/include/google/protobuf/inlined_string_field.h + /usr/local/include/google/protobuf/io/coded_stream.h + /usr/local/include/google/protobuf/io/zero_copy_stream.h + /usr/local/include/google/protobuf/map.h 
+ /usr/local/include/google/protobuf/map_entry_lite.h + /usr/local/include/google/protobuf/map_field_lite.h + /usr/local/include/google/protobuf/map_type_handler.h + /usr/local/include/google/protobuf/message.h + /usr/local/include/google/protobuf/message_lite.h + /usr/local/include/google/protobuf/metadata.h + /usr/local/include/google/protobuf/metadata_lite.h + /usr/local/include/google/protobuf/parse_context.h + /usr/local/include/google/protobuf/port.h + /usr/local/include/google/protobuf/port_def.inc + /usr/local/include/google/protobuf/port_undef.inc + /usr/local/include/google/protobuf/repeated_field.h + /usr/local/include/google/protobuf/stubs/casts.h + /usr/local/include/google/protobuf/stubs/common.h + /usr/local/include/google/protobuf/stubs/fastmem.h + /usr/local/include/google/protobuf/stubs/hash.h + /usr/local/include/google/protobuf/stubs/logging.h + /usr/local/include/google/protobuf/stubs/macros.h + /usr/local/include/google/protobuf/stubs/mutex.h + /usr/local/include/google/protobuf/stubs/once.h + /usr/local/include/google/protobuf/stubs/platform_macros.h + /usr/local/include/google/protobuf/stubs/port.h + /usr/local/include/google/protobuf/stubs/stringpiece.h + /usr/local/include/google/protobuf/stubs/strutil.h + /usr/local/include/google/protobuf/unknown_field_set.h + /usr/local/include/google/protobuf/wire_format_lite.h +CMakeFiles/main.dir/face_recognition.cpp.o + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_base.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_mdl.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_op.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_rt.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/ops/acl_dvpp.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_app.h + 
/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_error.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_model.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread_mgr.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_type.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_utils.h + /home/ascend/ascend_ddk/arm/include/atlasutil/dvpp_process.h + /home/ascend/ascend_ddk/arm/include/atlasutil/parse_config.h + /home/ascend/ascend_ddk/arm/include/atlasutil/thread_safe_queue.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/calib3d.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/affine.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/async.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/base.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/bufferpool.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/check.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda_types.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_dispatch.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_helper.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvdef.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd_wrapper.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/fast_math.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/interface.h + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/msa_macros.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/matx.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/neon_utils.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/operations.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/optim.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/ovx.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/persistence.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/saturate.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/traits.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utility.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/instrumentation.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/tls.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/version.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/vsx_utils.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dict.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/layer.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/utils/inference_engine.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/version.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/features2d.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/all_indices.h + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/any.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/autotuned_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/composite_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/config.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/defines.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dist.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dynamic_bitset.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/flann_base.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/ground_truth.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/hierarchical_clustering_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/index_testing.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_single_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kmeans_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/linear_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/logger.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_table.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/miniflann.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/params.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/sampling.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/timer.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/highgui.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgcodecs.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc/types_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml/ml.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect/detection_based_tracker.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv_modules.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/photo.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/blenders.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/camera.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/exposure_compensate.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/matchers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/motion_estimators.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/seam_finders.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util_inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers_inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/warpers.hpp + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/background_segm.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/tracking.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/videoio.hpp + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_mean.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_std.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_thread_base.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_recognition.cpp +CMakeFiles/main.dir/face_register.cpp.o + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_base.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_mdl.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_op.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_rt.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/ops/acl_dvpp.h + /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/channel.h + /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/errors.h + /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/presenter_channel.h + 
/home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/presenter_types.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_app.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_error.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread_mgr.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_type.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_utils.h + /home/ascend/ascend_ddk/arm/include/atlasutil/dvpp_process.h + /home/ascend/ascend_ddk/arm/include/atlasutil/parse_config.h + /home/ascend/ascend_ddk/arm/include/atlasutil/thread_safe_queue.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/calib3d.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/affine.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/async.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/base.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/bufferpool.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/check.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda_types.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_dispatch.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_helper.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvdef.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd_wrapper.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/fast_math.hpp + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/interface.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/msa_macros.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/matx.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/neon_utils.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/operations.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/optim.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/ovx.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/persistence.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/saturate.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/traits.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utility.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/instrumentation.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/tls.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/version.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/vsx_utils.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dict.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/layer.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/utils/inference_engine.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/version.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/features2d.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann.hpp + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/all_indices.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/any.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/autotuned_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/composite_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/config.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/defines.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dist.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dynamic_bitset.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/flann_base.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/ground_truth.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/hierarchical_clustering_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/index_testing.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_single_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kmeans_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/linear_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/logger.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_table.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/miniflann.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/params.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/sampling.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/timer.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/highgui.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgcodecs.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc/types_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml/ml.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect/detection_based_tracker.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv_modules.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/photo.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/blenders.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/camera.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/exposure_compensate.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/matchers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/motion_estimators.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/seam_finders.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util_inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers_inl.hpp + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/warpers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/background_segm.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/tracking.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/videoio.hpp + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_mean.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_std.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_register.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_recognition_message.pb.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_thread_base.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/presenter_channels.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_register.cpp + /usr/local/include/google/protobuf/arena.h + /usr/local/include/google/protobuf/arena_impl.h + /usr/local/include/google/protobuf/arenastring.h + /usr/local/include/google/protobuf/descriptor.h + /usr/local/include/google/protobuf/extension_set.h + /usr/local/include/google/protobuf/generated_enum_reflection.h + /usr/local/include/google/protobuf/generated_enum_util.h + 
/usr/local/include/google/protobuf/generated_message_reflection.h + /usr/local/include/google/protobuf/generated_message_table_driven.h + /usr/local/include/google/protobuf/generated_message_util.h + /usr/local/include/google/protobuf/has_bits.h + /usr/local/include/google/protobuf/implicit_weak_message.h + /usr/local/include/google/protobuf/inlined_string_field.h + /usr/local/include/google/protobuf/io/coded_stream.h + /usr/local/include/google/protobuf/io/zero_copy_stream.h + /usr/local/include/google/protobuf/map.h + /usr/local/include/google/protobuf/map_entry_lite.h + /usr/local/include/google/protobuf/map_field_lite.h + /usr/local/include/google/protobuf/map_type_handler.h + /usr/local/include/google/protobuf/message.h + /usr/local/include/google/protobuf/message_lite.h + /usr/local/include/google/protobuf/metadata.h + /usr/local/include/google/protobuf/metadata_lite.h + /usr/local/include/google/protobuf/parse_context.h + /usr/local/include/google/protobuf/port.h + /usr/local/include/google/protobuf/port_def.inc + /usr/local/include/google/protobuf/port_undef.inc + /usr/local/include/google/protobuf/repeated_field.h + /usr/local/include/google/protobuf/stubs/casts.h + /usr/local/include/google/protobuf/stubs/common.h + /usr/local/include/google/protobuf/stubs/fastmem.h + /usr/local/include/google/protobuf/stubs/hash.h + /usr/local/include/google/protobuf/stubs/logging.h + /usr/local/include/google/protobuf/stubs/macros.h + /usr/local/include/google/protobuf/stubs/mutex.h + /usr/local/include/google/protobuf/stubs/once.h + /usr/local/include/google/protobuf/stubs/platform_macros.h + /usr/local/include/google/protobuf/stubs/port.h + /usr/local/include/google/protobuf/stubs/stringpiece.h + /usr/local/include/google/protobuf/stubs/strutil.h + /usr/local/include/google/protobuf/unknown_field_set.h + /usr/local/include/google/protobuf/wire_format_lite.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o + 
/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_recognition_message.pb.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/facial_recognition_message.pb.cc + /usr/local/include/google/protobuf/arena.h + /usr/local/include/google/protobuf/arena_impl.h + /usr/local/include/google/protobuf/arenastring.h + /usr/local/include/google/protobuf/descriptor.h + /usr/local/include/google/protobuf/extension_set.h + /usr/local/include/google/protobuf/generated_enum_reflection.h + /usr/local/include/google/protobuf/generated_enum_util.h + /usr/local/include/google/protobuf/generated_message_reflection.h + /usr/local/include/google/protobuf/generated_message_table_driven.h + /usr/local/include/google/protobuf/generated_message_util.h + /usr/local/include/google/protobuf/has_bits.h + /usr/local/include/google/protobuf/implicit_weak_message.h + /usr/local/include/google/protobuf/inlined_string_field.h + /usr/local/include/google/protobuf/io/coded_stream.h + /usr/local/include/google/protobuf/io/zero_copy_stream.h + /usr/local/include/google/protobuf/map.h + /usr/local/include/google/protobuf/map_entry_lite.h + /usr/local/include/google/protobuf/map_field_lite.h + /usr/local/include/google/protobuf/map_type_handler.h + /usr/local/include/google/protobuf/message.h + /usr/local/include/google/protobuf/message_lite.h + /usr/local/include/google/protobuf/metadata.h + /usr/local/include/google/protobuf/metadata_lite.h + /usr/local/include/google/protobuf/parse_context.h + /usr/local/include/google/protobuf/port.h + /usr/local/include/google/protobuf/port_def.inc + /usr/local/include/google/protobuf/port_undef.inc + /usr/local/include/google/protobuf/reflection_ops.h + /usr/local/include/google/protobuf/repeated_field.h + /usr/local/include/google/protobuf/stubs/casts.h + /usr/local/include/google/protobuf/stubs/common.h + 
/usr/local/include/google/protobuf/stubs/fastmem.h + /usr/local/include/google/protobuf/stubs/hash.h + /usr/local/include/google/protobuf/stubs/logging.h + /usr/local/include/google/protobuf/stubs/macros.h + /usr/local/include/google/protobuf/stubs/mutex.h + /usr/local/include/google/protobuf/stubs/once.h + /usr/local/include/google/protobuf/stubs/platform_macros.h + /usr/local/include/google/protobuf/stubs/port.h + /usr/local/include/google/protobuf/stubs/stringpiece.h + /usr/local/include/google/protobuf/stubs/strutil.h + /usr/local/include/google/protobuf/unknown_field_set.h + /usr/local/include/google/protobuf/wire_format.h + /usr/local/include/google/protobuf/wire_format_lite.h +CMakeFiles/main.dir/main.cpp.o + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_base.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_mdl.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_op.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_rt.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/ops/acl_dvpp.h + /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/channel.h + /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/errors.h + /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/presenter_channel.h + /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/presenter_types.h + /home/ascend/ascend_ddk/arm/include/atlasutil/acl_device.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_app.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_error.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_model.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread_mgr.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_type.h + 
/home/ascend/ascend_ddk/arm/include/atlasutil/atlas_utils.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_videocap_base.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_videocapture.h + /home/ascend/ascend_ddk/arm/include/atlasutil/dvpp_process.h + /home/ascend/ascend_ddk/arm/include/atlasutil/parse_config.h + /home/ascend/ascend_ddk/arm/include/atlasutil/thread_safe_queue.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/calib3d.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/affine.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/async.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/base.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/bufferpool.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/check.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda_types.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_dispatch.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_helper.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvdef.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd_wrapper.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/fast_math.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/interface.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/msa_macros.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.hpp + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/matx.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/neon_utils.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/operations.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/optim.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/ovx.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/persistence.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/saturate.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/traits.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utility.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/instrumentation.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/tls.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/version.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/vsx_utils.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dict.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/layer.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/utils/inference_engine.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/version.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/features2d.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/all_indices.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/any.h + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/autotuned_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/composite_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/config.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/defines.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dist.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dynamic_bitset.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/flann_base.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/ground_truth.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/hierarchical_clustering_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/index_testing.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_single_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kmeans_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/linear_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/logger.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_table.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/miniflann.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/params.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/sampling.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/timer.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/highgui.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgcodecs.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc/types_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml/ml.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect/detection_based_tracker.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv_modules.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/photo.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/blenders.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/camera.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/exposure_compensate.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/matchers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/motion_estimators.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/seam_finders.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util_inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers_inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/warpers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/background_segm.hpp + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/tracking.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/videoio.hpp + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_detection.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_mask.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_mean.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_std.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_post_process.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_register.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_recognition_message.pb.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_thread_base.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/mind_camera.h + 
/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/presenter_channels.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/main.cpp + /usr/local/include/google/protobuf/arena.h + /usr/local/include/google/protobuf/arena_impl.h + /usr/local/include/google/protobuf/arenastring.h + /usr/local/include/google/protobuf/descriptor.h + /usr/local/include/google/protobuf/extension_set.h + /usr/local/include/google/protobuf/generated_enum_reflection.h + /usr/local/include/google/protobuf/generated_enum_util.h + /usr/local/include/google/protobuf/generated_message_reflection.h + /usr/local/include/google/protobuf/generated_message_table_driven.h + /usr/local/include/google/protobuf/generated_message_util.h + /usr/local/include/google/protobuf/has_bits.h + /usr/local/include/google/protobuf/implicit_weak_message.h + /usr/local/include/google/protobuf/inlined_string_field.h + /usr/local/include/google/protobuf/io/coded_stream.h + /usr/local/include/google/protobuf/io/zero_copy_stream.h + /usr/local/include/google/protobuf/map.h + /usr/local/include/google/protobuf/map_entry_lite.h + /usr/local/include/google/protobuf/map_field_lite.h + /usr/local/include/google/protobuf/map_type_handler.h + /usr/local/include/google/protobuf/message.h + /usr/local/include/google/protobuf/message_lite.h + /usr/local/include/google/protobuf/metadata.h + /usr/local/include/google/protobuf/metadata_lite.h + /usr/local/include/google/protobuf/parse_context.h + /usr/local/include/google/protobuf/port.h + /usr/local/include/google/protobuf/port_def.inc + /usr/local/include/google/protobuf/port_undef.inc + /usr/local/include/google/protobuf/repeated_field.h + /usr/local/include/google/protobuf/stubs/casts.h + /usr/local/include/google/protobuf/stubs/common.h + /usr/local/include/google/protobuf/stubs/fastmem.h + 
/usr/local/include/google/protobuf/stubs/hash.h + /usr/local/include/google/protobuf/stubs/logging.h + /usr/local/include/google/protobuf/stubs/macros.h + /usr/local/include/google/protobuf/stubs/mutex.h + /usr/local/include/google/protobuf/stubs/once.h + /usr/local/include/google/protobuf/stubs/platform_macros.h + /usr/local/include/google/protobuf/stubs/port.h + /usr/local/include/google/protobuf/stubs/stringpiece.h + /usr/local/include/google/protobuf/stubs/strutil.h + /usr/local/include/google/protobuf/unknown_field_set.h + /usr/local/include/google/protobuf/wire_format_lite.h +CMakeFiles/main.dir/mind_camera.cpp.o + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_base.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_mdl.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_op.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_rt.h + /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/ops/acl_dvpp.h + /home/ascend/Ascend/driver/peripheral_api.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_app.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_error.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread_mgr.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_type.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_utils.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_videocap_base.h + /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_videocapture.h + /home/ascend/ascend_ddk/arm/include/atlasutil/parse_config.h + /home/ascend/ascend_ddk/arm/include/atlasutil/thread_safe_queue.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/calib3d.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core.hpp + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/affine.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/async.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/base.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/bufferpool.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/check.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda_types.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_dispatch.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_helper.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvdef.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd_wrapper.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/fast_math.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/interface.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/msa_macros.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/matx.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/neon_utils.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/operations.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/optim.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/ovx.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/persistence.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/saturate.hpp + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/traits.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utility.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/instrumentation.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/tls.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/version.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/vsx_utils.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dict.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/layer.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/utils/inference_engine.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/version.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/features2d.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/all_indices.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/any.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/autotuned_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/composite_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/config.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/defines.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dist.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dynamic_bitset.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/flann_base.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/ground_truth.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/hierarchical_clustering_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/index_testing.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_single_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kmeans_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/linear_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/logger.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_table.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/miniflann.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/params.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/sampling.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/timer.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/highgui.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgcodecs.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc/types_c.h + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml/ml.inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect.hpp + 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect/detection_based_tracker.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv_modules.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/photo.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/blenders.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/camera.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/exposure_compensate.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/matchers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/motion_estimators.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/seam_finders.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util_inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers_inl.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/warpers.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/background_segm.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/tracking.hpp + /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/videoio.hpp + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_mean.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_std.h + 
/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_thread_base.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/mind_camera.h + /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/mind_camera.cpp diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/depend.make b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/depend.make new file mode 100644 index 0000000000000000000000000000000000000000..aeddc3655bf6f69a5d4284379c34c778254b5cc2 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/depend.make @@ -0,0 +1,1096 @@ +# CMAKE generated file: DO NOT EDIT! 
+# Generated by "Unix Makefiles" Generator, CMake Version 3.10 + +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_base.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_mdl.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_op.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_rt.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/ops/acl_dvpp.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_app.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_error.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_model.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread_mgr.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_type.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_utils.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/dvpp_process.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/parse_config.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/thread_safe_queue.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/calib3d.hpp 
+CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/affine.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/async.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/base.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/bufferpool.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/check.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core_c.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.inl.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda_types.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_dispatch.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_helper.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvdef.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.inl.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd_wrapper.hpp +CMakeFiles/main.dir/face_detection.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/fast_math.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/interface.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/msa_macros.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.inl.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/matx.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/neon_utils.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/operations.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/optim.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/ovx.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/persistence.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/saturate.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/traits.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types_c.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utility.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/instrumentation.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/tls.hpp 
+CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/version.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/vsx_utils.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dict.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.inl.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/layer.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/utils/inference_engine.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/version.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/features2d.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/all_indices.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/any.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/autotuned_index.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/composite_index.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/config.h +CMakeFiles/main.dir/face_detection.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/defines.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dist.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dynamic_bitset.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/flann_base.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/ground_truth.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/hierarchical_clustering_index.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/index_testing.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_index.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_single_index.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kmeans_index.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/linear_index.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/logger.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_index.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_table.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h +CMakeFiles/main.dir/face_detection.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/miniflann.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/params.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/sampling.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/timer.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/highgui.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgcodecs.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc/types_c.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml/ml.inl.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect/detection_based_tracker.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv_modules.hpp 
+CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/photo.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/blenders.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/camera.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/exposure_compensate.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/matchers.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/motion_estimators.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/seam_finders.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util_inl.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers_inl.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/warpers.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/background_segm.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/tracking.hpp +CMakeFiles/main.dir/face_detection.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/videoio.hpp +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_detection.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_mean.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_std.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_thread_base.h +CMakeFiles/main.dir/face_detection.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_detection.cpp + +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_base.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_mdl.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_op.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_rt.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: 
/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/ops/acl_dvpp.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_app.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_error.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_model.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread_mgr.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_type.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_utils.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/dvpp_process.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/parse_config.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/thread_safe_queue.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/calib3d.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/affine.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/async.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/base.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/bufferpool.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/check.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core_c.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.inl.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda_types.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_dispatch.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_helper.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvdef.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.inl.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd_wrapper.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/fast_math.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/interface.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/msa_macros.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.inl.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/matx.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/neon_utils.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/operations.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/optim.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/ovx.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/persistence.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/saturate.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/traits.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types_c.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utility.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/instrumentation.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/tls.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/version.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/vsx_utils.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dict.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.inl.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/layer.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/utils/inference_engine.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/version.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/features2d.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/all_indices.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/any.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/autotuned_index.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/composite_index.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/config.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/defines.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dist.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dynamic_bitset.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/flann_base.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/ground_truth.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/hierarchical_clustering_index.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/index_testing.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_index.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_single_index.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kmeans_index.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/linear_index.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/logger.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_index.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_table.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/miniflann.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/params.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h 
+CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/sampling.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/timer.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/highgui.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgcodecs.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc/types_c.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml/ml.inl.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect/detection_based_tracker.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv_modules.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/photo.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/blenders.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/camera.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/exposure_compensate.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/matchers.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/motion_estimators.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/seam_finders.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util_inl.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers_inl.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/warpers.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/background_segm.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/tracking.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/videoio.hpp +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_mask.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_mean.h 
+CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_std.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_thread_base.h +CMakeFiles/main.dir/face_feature_mask.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_feature_mask.cpp + +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_base.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_mdl.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_op.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_rt.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/ops/acl_dvpp.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/channel.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/errors.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/presenter_channel.h +CMakeFiles/main.dir/face_post_process.cpp.o: 
/home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/presenter_types.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_app.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_error.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread_mgr.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_type.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_utils.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/dvpp_process.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/parse_config.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/thread_safe_queue.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/calib3d.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/affine.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/async.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/base.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/bufferpool.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/check.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core_c.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.inl.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda_types.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_dispatch.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_helper.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvdef.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.inl.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd_wrapper.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/fast_math.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/interface.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/msa_macros.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.inl.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/matx.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/neon_utils.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/operations.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/optim.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/ovx.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/persistence.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/saturate.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/traits.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types_c.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utility.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/instrumentation.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/tls.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/version.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/vsx_utils.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dict.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.inl.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/layer.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/utils/inference_engine.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/version.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/features2d.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/all_indices.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/any.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/autotuned_index.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/composite_index.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/config.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/defines.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dist.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dynamic_bitset.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/flann_base.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/ground_truth.h +CMakeFiles/main.dir/face_post_process.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/hierarchical_clustering_index.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/index_testing.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_index.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_single_index.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kmeans_index.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/linear_index.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/logger.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_index.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_table.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/miniflann.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/params.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/sampling.h +CMakeFiles/main.dir/face_post_process.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/timer.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/highgui.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgcodecs.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc/types_c.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml/ml.inl.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect/detection_based_tracker.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv_modules.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/photo.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/blenders.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/camera.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/exposure_compensate.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/matchers.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/motion_estimators.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/seam_finders.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util_inl.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers_inl.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/warpers.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/background_segm.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/tracking.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/videoio.hpp +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_mean.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_std.h +CMakeFiles/main.dir/face_post_process.cpp.o: 
/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_post_process.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_recognition_message.pb.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/presenter_channels.h +CMakeFiles/main.dir/face_post_process.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_post_process.cpp +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/arena.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/arena_impl.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/arenastring.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/descriptor.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/extension_set.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/generated_enum_reflection.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/generated_enum_util.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/generated_message_reflection.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/generated_message_table_driven.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/generated_message_util.h 
+CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/has_bits.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/implicit_weak_message.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/inlined_string_field.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/io/coded_stream.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/io/zero_copy_stream.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/map.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/map_entry_lite.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/map_field_lite.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/map_type_handler.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/message.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/message_lite.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/metadata.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/metadata_lite.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/parse_context.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/port.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/port_def.inc +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/port_undef.inc +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/repeated_field.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/stubs/casts.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/stubs/common.h +CMakeFiles/main.dir/face_post_process.cpp.o: 
/usr/local/include/google/protobuf/stubs/fastmem.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/stubs/hash.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/stubs/logging.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/stubs/macros.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/stubs/mutex.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/stubs/once.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/stubs/platform_macros.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/stubs/port.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/stubs/stringpiece.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/stubs/strutil.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/unknown_field_set.h +CMakeFiles/main.dir/face_post_process.cpp.o: /usr/local/include/google/protobuf/wire_format_lite.h + +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_base.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_mdl.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_op.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_rt.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/ops/acl_dvpp.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_app.h 
+CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_error.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_model.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread_mgr.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_type.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_utils.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/dvpp_process.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/parse_config.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/thread_safe_queue.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/calib3d.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/affine.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/async.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/base.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/bufferpool.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/check.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core_c.h 
+CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.inl.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda_types.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_dispatch.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_helper.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvdef.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.inl.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd_wrapper.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/fast_math.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/interface.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/msa_macros.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.inl.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/matx.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/neon_utils.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/operations.hpp 
+CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/optim.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/ovx.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/persistence.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/saturate.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/traits.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types_c.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utility.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/instrumentation.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/tls.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/version.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/vsx_utils.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dict.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.inl.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/layer.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/utils/inference_engine.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/version.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/features2d.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/all_indices.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/any.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/autotuned_index.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/composite_index.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/config.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/defines.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dist.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dynamic_bitset.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/flann_base.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/ground_truth.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h +CMakeFiles/main.dir/face_recognition.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/hierarchical_clustering_index.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/index_testing.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_index.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_single_index.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kmeans_index.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/linear_index.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/logger.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_index.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_table.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/miniflann.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/params.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/sampling.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h +CMakeFiles/main.dir/face_recognition.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/timer.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/highgui.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgcodecs.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc/types_c.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml/ml.inl.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect/detection_based_tracker.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv_modules.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/photo.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/blenders.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/camera.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/exposure_compensate.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/matchers.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/motion_estimators.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/seam_finders.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util_inl.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers_inl.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/warpers.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/background_segm.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/tracking.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/videoio.hpp +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_mean.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_std.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition.h +CMakeFiles/main.dir/face_recognition.cpp.o: 
/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_thread_base.h +CMakeFiles/main.dir/face_recognition.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_recognition.cpp + +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_base.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_mdl.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_op.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_rt.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/ops/acl_dvpp.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/channel.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/errors.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/presenter_channel.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/presenter_types.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_app.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_error.h 
+CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread_mgr.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_type.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_utils.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/dvpp_process.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/parse_config.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/thread_safe_queue.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/calib3d.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/affine.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/async.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/base.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/bufferpool.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/check.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core_c.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.inl.hpp +CMakeFiles/main.dir/face_register.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda_types.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_dispatch.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_helper.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvdef.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.inl.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd_wrapper.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/fast_math.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/interface.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/msa_macros.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.inl.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/matx.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/neon_utils.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/operations.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/optim.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/ovx.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/persistence.hpp 
+CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/saturate.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/traits.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types_c.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utility.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/instrumentation.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/tls.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/version.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/vsx_utils.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dict.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.inl.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/layer.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/utils/inference_engine.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/version.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/features2d.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann.hpp 
+CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/all_indices.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/any.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/autotuned_index.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/composite_index.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/config.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/defines.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dist.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dynamic_bitset.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/flann_base.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/ground_truth.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/hierarchical_clustering_index.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/index_testing.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_index.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_single_index.h +CMakeFiles/main.dir/face_register.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kmeans_index.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/linear_index.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/logger.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_index.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_table.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/miniflann.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/params.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/sampling.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/timer.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/highgui.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgcodecs.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc/types_c.h +CMakeFiles/main.dir/face_register.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml/ml.inl.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect/detection_based_tracker.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv_modules.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/photo.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/blenders.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/camera.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/exposure_compensate.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/matchers.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/motion_estimators.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/seam_finders.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util_inl.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers.hpp 
+CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers_inl.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/warpers.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/background_segm.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/tracking.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/videoio.hpp +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_mean.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_std.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_register.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_recognition_message.pb.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_thread_base.h +CMakeFiles/main.dir/face_register.cpp.o: 
/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/presenter_channels.h +CMakeFiles/main.dir/face_register.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/face_register.cpp +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/arena.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/arena_impl.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/arenastring.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/descriptor.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/extension_set.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/generated_enum_reflection.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/generated_enum_util.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/generated_message_reflection.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/generated_message_table_driven.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/generated_message_util.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/has_bits.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/implicit_weak_message.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/inlined_string_field.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/io/coded_stream.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/io/zero_copy_stream.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/map.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/map_entry_lite.h 
+CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/map_field_lite.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/map_type_handler.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/message.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/message_lite.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/metadata.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/metadata_lite.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/parse_context.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/port.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/port_def.inc +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/port_undef.inc +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/repeated_field.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/stubs/casts.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/stubs/common.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/stubs/fastmem.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/stubs/hash.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/stubs/logging.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/stubs/macros.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/stubs/mutex.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/stubs/once.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/stubs/platform_macros.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/stubs/port.h +CMakeFiles/main.dir/face_register.cpp.o: 
/usr/local/include/google/protobuf/stubs/stringpiece.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/stubs/strutil.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/unknown_field_set.h +CMakeFiles/main.dir/face_register.cpp.o: /usr/local/include/google/protobuf/wire_format_lite.h + +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_recognition_message.pb.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/facial_recognition_message.pb.cc +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/arena.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/arena_impl.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/arenastring.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/descriptor.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/extension_set.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/generated_enum_reflection.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/generated_enum_util.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/generated_message_reflection.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/generated_message_table_driven.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/generated_message_util.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: 
/usr/local/include/google/protobuf/has_bits.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/implicit_weak_message.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/inlined_string_field.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/io/coded_stream.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/io/zero_copy_stream.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/map.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/map_entry_lite.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/map_field_lite.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/map_type_handler.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/message.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/message_lite.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/metadata.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/metadata_lite.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/parse_context.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/port.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/port_def.inc +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/port_undef.inc +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/reflection_ops.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/repeated_field.h 
+CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/stubs/casts.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/stubs/common.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/stubs/fastmem.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/stubs/hash.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/stubs/logging.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/stubs/macros.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/stubs/mutex.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/stubs/once.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/stubs/platform_macros.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/stubs/port.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/stubs/stringpiece.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/stubs/strutil.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/unknown_field_set.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/wire_format.h +CMakeFiles/main.dir/facial_recognition_message.pb.cc.o: /usr/local/include/google/protobuf/wire_format_lite.h + +CMakeFiles/main.dir/main.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_base.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_mdl.h 
+CMakeFiles/main.dir/main.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_op.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_rt.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/ops/acl_dvpp.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/channel.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/errors.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/presenter_channel.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/ascenddk/presenter/agent/presenter_types.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/acl_device.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_app.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_error.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_model.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread_mgr.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_type.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_utils.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_videocap_base.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_videocapture.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/dvpp_process.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/parse_config.h +CMakeFiles/main.dir/main.cpp.o: 
/home/ascend/ascend_ddk/arm/include/atlasutil/thread_safe_queue.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/calib3d.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/affine.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/async.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/base.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/bufferpool.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/check.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core_c.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.inl.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda_types.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_dispatch.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_helper.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvdef.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.inl.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd_wrapper.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/fast_math.hpp 
+CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/interface.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/msa_macros.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.inl.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/matx.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/neon_utils.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/operations.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/optim.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/ovx.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/persistence.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/saturate.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/traits.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types_c.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utility.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/instrumentation.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/tls.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/version.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/vsx_utils.hpp +CMakeFiles/main.dir/main.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dict.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.inl.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/layer.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/utils/inference_engine.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/version.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/features2d.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/all_indices.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/any.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/autotuned_index.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/composite_index.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/config.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/defines.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dist.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dynamic_bitset.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/flann_base.hpp +CMakeFiles/main.dir/main.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/ground_truth.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/hierarchical_clustering_index.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/index_testing.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_index.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_single_index.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kmeans_index.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/linear_index.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/logger.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_index.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_table.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/miniflann.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/params.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/sampling.h +CMakeFiles/main.dir/main.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/timer.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/highgui.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgcodecs.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc/types_c.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml/ml.inl.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect/detection_based_tracker.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv_modules.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/photo.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/blenders.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/camera.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/exposure_compensate.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/matchers.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/motion_estimators.hpp +CMakeFiles/main.dir/main.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/seam_finders.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util_inl.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers_inl.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/warpers.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/background_segm.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/tracking.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/videoio.hpp +CMakeFiles/main.dir/main.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_detection.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_mask.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_mean.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_std.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_post_process.h 
+CMakeFiles/main.dir/main.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_register.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_recognition_message.pb.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_thread_base.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/mind_camera.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/presenter_channels.h +CMakeFiles/main.dir/main.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/main.cpp +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/arena.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/arena_impl.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/arenastring.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/descriptor.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/extension_set.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/generated_enum_reflection.h 
+CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/generated_enum_util.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/generated_message_reflection.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/generated_message_table_driven.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/generated_message_util.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/has_bits.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/implicit_weak_message.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/inlined_string_field.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/io/coded_stream.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/io/zero_copy_stream.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/map.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/map_entry_lite.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/map_field_lite.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/map_type_handler.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/message.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/message_lite.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/metadata.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/metadata_lite.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/parse_context.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/port.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/port_def.inc +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/port_undef.inc +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/repeated_field.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/stubs/casts.h +CMakeFiles/main.dir/main.cpp.o: 
/usr/local/include/google/protobuf/stubs/common.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/stubs/fastmem.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/stubs/hash.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/stubs/logging.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/stubs/macros.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/stubs/mutex.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/stubs/once.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/stubs/platform_macros.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/stubs/port.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/stubs/stringpiece.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/stubs/strutil.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/unknown_field_set.h +CMakeFiles/main.dir/main.cpp.o: /usr/local/include/google/protobuf/wire_format_lite.h + +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_base.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_mdl.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_op.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/acl_rt.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include/acl/ops/acl_dvpp.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/Ascend/driver/peripheral_api.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_app.h 
+CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_error.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_thread_mgr.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_type.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_utils.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_videocap_base.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/atlas_videocapture.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/parse_config.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/atlasutil/thread_safe_queue.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/calib3d.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/affine.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/async.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/base.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/bufferpool.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/check.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/core_c.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.hpp 
+CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda.inl.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cuda_types.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_dispatch.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cv_cpu_helper.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvdef.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd.inl.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/cvstd_wrapper.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/fast_math.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/interface.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/hal/msa_macros.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/mat.inl.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/matx.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/neon_utils.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/operations.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/optim.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/ovx.hpp 
+CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/persistence.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/saturate.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/traits.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/types_c.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utility.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/instrumentation.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/utils/tls.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/version.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/core/vsx_utils.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dict.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/dnn.inl.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/layer.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/utils/inference_engine.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/dnn/version.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/features2d.hpp 
+CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/all_indices.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/allocator.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/any.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/autotuned_index.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/composite_index.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/config.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/defines.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dist.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/dynamic_bitset.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/flann_base.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/general.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/ground_truth.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/heap.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/hierarchical_clustering_index.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/index_testing.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_index.h +CMakeFiles/main.dir/mind_camera.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kdtree_single_index.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/kmeans_index.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/linear_index.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/logger.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_index.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/lsh_table.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/matrix.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/miniflann.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/nn_index.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/params.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/random.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/result_set.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/sampling.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/saving.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/flann/timer.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/highgui.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgcodecs.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/imgproc/types_c.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/ml/ml.inl.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/objdetect/detection_based_tracker.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/opencv_modules.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/photo.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/blenders.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/camera.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/exposure_compensate.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/matchers.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/motion_estimators.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/seam_finders.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/util_inl.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: 
/home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/detail/warpers_inl.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/stitching/warpers.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/background_segm.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/video/tracking.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/ascend_ddk/arm/include/opencv4/opencv2/videoio.hpp +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_mean.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_feature_train_std.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/face_recognition_params.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/facial_thread_base.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc/mind_camera.h +CMakeFiles/main.dir/mind_camera.cpp.o: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/mind_camera.cpp + diff --git 
a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_detect.cpp.o b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_detect.cpp.o new file mode 100644 index 0000000000000000000000000000000000000000..e852b267f2b5c6e8bea25e3154122de4e325cc4b Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_detect.cpp.o differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_detection.cpp.o b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_detection.cpp.o new file mode 100644 index 0000000000000000000000000000000000000000..4e9e519587f697c288dae73599c9a6f143a01026 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_detection.cpp.o differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_feature_mask.cpp.o b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_feature_mask.cpp.o new file mode 100644 index 0000000000000000000000000000000000000000..f34be98453331ad8aa9e9614cc0ecd17d3794c42 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_feature_mask.cpp.o differ diff --git 
a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_post_process.cpp.o b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_post_process.cpp.o new file mode 100644 index 0000000000000000000000000000000000000000..226500c84fa00ef63fbc4f12a59bba5702bc0750 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_post_process.cpp.o differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_recognition.cpp.o b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_recognition.cpp.o new file mode 100644 index 0000000000000000000000000000000000000000..9f1afe31e08a599a430d0b6c0fc742715265a44b Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_recognition.cpp.o differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_register.cpp.o b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_register.cpp.o new file mode 100644 index 0000000000000000000000000000000000000000..28bf754b4bace2f098d5b0658752d0e2f9748835 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/face_register.cpp.o differ diff --git 
a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/facial_recognition_message.pb.cc.o b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/facial_recognition_message.pb.cc.o new file mode 100644 index 0000000000000000000000000000000000000000..69059e9de4057a46ac92eb7d2f18dd002c5c5997 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/facial_recognition_message.pb.cc.o differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/flags.make b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/flags.make new file mode 100644 index 0000000000000000000000000000000000000000..c5638db5dfbf0dfc7bba701ddd2c449ca52e53c9 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/flags.make @@ -0,0 +1,10 @@ +# CMAKE generated file: DO NOT EDIT! 
+# Generated by "Unix Makefiles" Generator, CMake Version 3.10 + +# compile CXX with /usr/bin/aarch64-linux-gnu-g++ +CXX_FLAGS = -std=c++11 + +CXX_DEFINES = -DENABLE_BOARD_CAMARE -DENABLE_DVPP_INTERFACE + +CXX_INCLUDES = -I/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src/../inc -I/home/ascend/ascend_ddk/arm/include/opencv4 -I/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/include -I/home/ascend/ascend_ddk/arm/include -I/home/ascend/ascend_ddk/arm/include/ascenddk -I/home/ascend/Ascend -I/home/ascend/Ascend/ascenddk -I/usr/local/include + diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/link.txt b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/link.txt new file mode 100644 index 0000000000000000000000000000000000000000..4578753df2a41fe956e6d469e4d3841b312320c2 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/link.txt @@ -0,0 +1 @@ +/usr/bin/aarch64-linux-gnu-g++ CMakeFiles/main.dir/mind_camera.cpp.o CMakeFiles/main.dir/face_register.cpp.o CMakeFiles/main.dir/face_detection.cpp.o CMakeFiles/main.dir/face_feature_mask.cpp.o CMakeFiles/main.dir/face_recognition.cpp.o CMakeFiles/main.dir/facial_recognition_message.pb.cc.o CMakeFiles/main.dir/face_post_process.cpp.o CMakeFiles/main.dir/main.cpp.o -o /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/main -L/usr/local/lib -L/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/acllib/lib64/stub -L/home/ascend/ascend_ddk/arm/lib -L/home/ascend/Ascend/driver -L/home/ascend/Ascend -L/home/ascend/Ascend/ascend-toolkit/latest/arm64-linux/atc/lib64 -lascendcl 
-lacl_dvpp -lstdc++ -lc_sec -lslog -latlasutil -lavcodec -lavformat -lavdevice -lavutil -lswresample -lavfilter -lswscale -lmedia_mini -lopencv_highgui -lopencv_core -lopencv_imgproc -lopencv_imgcodecs -lopencv_calib3d -lopencv_flann -lopencv_features2d -lopencv_videoio -lprotobuf -lpresenteragent -lpthread diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/main.cpp.o b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/main.cpp.o new file mode 100644 index 0000000000000000000000000000000000000000..36ebfbfc1deb655c995b14a88999c40852bd6238 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/main.cpp.o differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/mind_camera.cpp.o b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/mind_camera.cpp.o new file mode 100644 index 0000000000000000000000000000000000000000..1b08e2b58358f0c76d743f6db1e2b473d5817901 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/mind_camera.cpp.o differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/progress.make b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/progress.make new file mode 100644 index 0000000000000000000000000000000000000000..d4f6ce35d26a4136a4edb4873325a1cadb14ccfe --- 
/dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/main.dir/progress.make @@ -0,0 +1,10 @@ +CMAKE_PROGRESS_1 = 1 +CMAKE_PROGRESS_2 = 2 +CMAKE_PROGRESS_3 = 3 +CMAKE_PROGRESS_4 = 4 +CMAKE_PROGRESS_5 = 5 +CMAKE_PROGRESS_6 = 6 +CMAKE_PROGRESS_7 = 7 +CMAKE_PROGRESS_8 = 8 +CMAKE_PROGRESS_9 = 9 + diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/progress.marks b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/progress.marks new file mode 100644 index 0000000000000000000000000000000000000000..ec635144f60048986bc560c5576355344005e6e7 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/progress.marks @@ -0,0 +1 @@ +9 diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/Makefile b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..ceb80cde3b356de8df51993109289e113375195d --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/Makefile @@ -0,0 +1,438 @@ +# CMAKE generated file: DO NOT EDIT! +# Generated by "Unix Makefiles" Generator, CMake Version 3.10 + +# Default target executed when no arguments are given to make. +default_target: all + +.PHONY : default_target + +# Allow only one "make -f Makefile2" at a time, but pass parallelism. +.NOTPARALLEL: + + +#============================================================================= +# Special targets provided by cmake. 
+ +# Disable implicit rules so canonical targets will work. +.SUFFIXES: + + +# Remove some rules from gmake that .SUFFIXES does not remove. +SUFFIXES = + +.SUFFIXES: .hpux_make_needs_suffix_list + + +# Suppress display of executed commands. +$(VERBOSE).SILENT: + + +# A target that is always out of date. +cmake_force: + +.PHONY : cmake_force + +#============================================================================= +# Set environment variables for the build. + +# The shell in which to execute make rules. +SHELL = /bin/sh + +# The CMake executable. +CMAKE_COMMAND = /usr/bin/cmake + +# The command to remove a file. +RM = /usr/bin/cmake -E remove -f + +# Escaping for special characters. +EQUALS = = + +# The top-level source directory on which CMake was run. +CMAKE_SOURCE_DIR = /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src + +# The top-level build directory on which CMake was run. +CMAKE_BINARY_DIR = /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host + +#============================================================================= +# Targets provided globally by CMake. + +# Special rule for the target install/strip +install/strip: preinstall + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Installing the project stripped..." + /usr/bin/cmake -DCMAKE_INSTALL_DO_STRIP=1 -P cmake_install.cmake +.PHONY : install/strip + +# Special rule for the target install/strip +install/strip/fast: preinstall/fast + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Installing the project stripped..." + /usr/bin/cmake -DCMAKE_INSTALL_DO_STRIP=1 -P cmake_install.cmake +.PHONY : install/strip/fast + +# Special rule for the target edit_cache +edit_cache: + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "No interactive CMake dialog available..." 
+ /usr/bin/cmake -E echo No\ interactive\ CMake\ dialog\ available. +.PHONY : edit_cache + +# Special rule for the target edit_cache +edit_cache/fast: edit_cache + +.PHONY : edit_cache/fast + +# Special rule for the target rebuild_cache +rebuild_cache: + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Running CMake to regenerate build system..." + /usr/bin/cmake -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) +.PHONY : rebuild_cache + +# Special rule for the target rebuild_cache +rebuild_cache/fast: rebuild_cache + +.PHONY : rebuild_cache/fast + +# Special rule for the target list_install_components +list_install_components: + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Available install components are: \"Unspecified\"" +.PHONY : list_install_components + +# Special rule for the target list_install_components +list_install_components/fast: list_install_components + +.PHONY : list_install_components/fast + +# Special rule for the target install/local +install/local: preinstall + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Installing only the local directory..." + /usr/bin/cmake -DCMAKE_INSTALL_LOCAL_ONLY=1 -P cmake_install.cmake +.PHONY : install/local + +# Special rule for the target install/local +install/local/fast: preinstall/fast + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Installing only the local directory..." + /usr/bin/cmake -DCMAKE_INSTALL_LOCAL_ONLY=1 -P cmake_install.cmake +.PHONY : install/local/fast + +# Special rule for the target install +install: preinstall + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Install the project..." + /usr/bin/cmake -P cmake_install.cmake +.PHONY : install + +# Special rule for the target install +install/fast: preinstall/fast + @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Install the project..." 
+ /usr/bin/cmake -P cmake_install.cmake +.PHONY : install/fast + +# The main all target +all: cmake_check_build_system + $(CMAKE_COMMAND) -E cmake_progress_start /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles/progress.marks + $(MAKE) -f CMakeFiles/Makefile2 all + $(CMAKE_COMMAND) -E cmake_progress_start /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/CMakeFiles 0 +.PHONY : all + +# The main clean target +clean: + $(MAKE) -f CMakeFiles/Makefile2 clean +.PHONY : clean + +# The main clean target +clean/fast: clean + +.PHONY : clean/fast + +# Prepare targets for installation. +preinstall: all + $(MAKE) -f CMakeFiles/Makefile2 preinstall +.PHONY : preinstall + +# Prepare targets for installation. +preinstall/fast: + $(MAKE) -f CMakeFiles/Makefile2 preinstall +.PHONY : preinstall/fast + +# clear depends +depend: + $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 1 +.PHONY : depend + +#============================================================================= +# Target rules for targets named main + +# Build rule for target. +main: cmake_check_build_system + $(MAKE) -f CMakeFiles/Makefile2 main +.PHONY : main + +# fast build rule for target. 
+main/fast: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/build +.PHONY : main/fast + +face_detection.o: face_detection.cpp.o + +.PHONY : face_detection.o + +# target to build an object file +face_detection.cpp.o: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_detection.cpp.o +.PHONY : face_detection.cpp.o + +face_detection.i: face_detection.cpp.i + +.PHONY : face_detection.i + +# target to preprocess a source file +face_detection.cpp.i: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_detection.cpp.i +.PHONY : face_detection.cpp.i + +face_detection.s: face_detection.cpp.s + +.PHONY : face_detection.s + +# target to generate assembly for a file +face_detection.cpp.s: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_detection.cpp.s +.PHONY : face_detection.cpp.s + +face_feature_mask.o: face_feature_mask.cpp.o + +.PHONY : face_feature_mask.o + +# target to build an object file +face_feature_mask.cpp.o: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_feature_mask.cpp.o +.PHONY : face_feature_mask.cpp.o + +face_feature_mask.i: face_feature_mask.cpp.i + +.PHONY : face_feature_mask.i + +# target to preprocess a source file +face_feature_mask.cpp.i: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_feature_mask.cpp.i +.PHONY : face_feature_mask.cpp.i + +face_feature_mask.s: face_feature_mask.cpp.s + +.PHONY : face_feature_mask.s + +# target to generate assembly for a file +face_feature_mask.cpp.s: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_feature_mask.cpp.s +.PHONY : face_feature_mask.cpp.s + +face_post_process.o: face_post_process.cpp.o + +.PHONY : face_post_process.o + +# target to build an object file +face_post_process.cpp.o: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_post_process.cpp.o +.PHONY : face_post_process.cpp.o + +face_post_process.i: face_post_process.cpp.i + +.PHONY : face_post_process.i 
+ +# target to preprocess a source file +face_post_process.cpp.i: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_post_process.cpp.i +.PHONY : face_post_process.cpp.i + +face_post_process.s: face_post_process.cpp.s + +.PHONY : face_post_process.s + +# target to generate assembly for a file +face_post_process.cpp.s: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_post_process.cpp.s +.PHONY : face_post_process.cpp.s + +face_recognition.o: face_recognition.cpp.o + +.PHONY : face_recognition.o + +# target to build an object file +face_recognition.cpp.o: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_recognition.cpp.o +.PHONY : face_recognition.cpp.o + +face_recognition.i: face_recognition.cpp.i + +.PHONY : face_recognition.i + +# target to preprocess a source file +face_recognition.cpp.i: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_recognition.cpp.i +.PHONY : face_recognition.cpp.i + +face_recognition.s: face_recognition.cpp.s + +.PHONY : face_recognition.s + +# target to generate assembly for a file +face_recognition.cpp.s: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_recognition.cpp.s +.PHONY : face_recognition.cpp.s + +face_register.o: face_register.cpp.o + +.PHONY : face_register.o + +# target to build an object file +face_register.cpp.o: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_register.cpp.o +.PHONY : face_register.cpp.o + +face_register.i: face_register.cpp.i + +.PHONY : face_register.i + +# target to preprocess a source file +face_register.cpp.i: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_register.cpp.i +.PHONY : face_register.cpp.i + +face_register.s: face_register.cpp.s + +.PHONY : face_register.s + +# target to generate assembly for a file +face_register.cpp.s: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/face_register.cpp.s +.PHONY : face_register.cpp.s + 
+facial_recognition_message.pb.o: facial_recognition_message.pb.cc.o + +.PHONY : facial_recognition_message.pb.o + +# target to build an object file +facial_recognition_message.pb.cc.o: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/facial_recognition_message.pb.cc.o +.PHONY : facial_recognition_message.pb.cc.o + +facial_recognition_message.pb.i: facial_recognition_message.pb.cc.i + +.PHONY : facial_recognition_message.pb.i + +# target to preprocess a source file +facial_recognition_message.pb.cc.i: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/facial_recognition_message.pb.cc.i +.PHONY : facial_recognition_message.pb.cc.i + +facial_recognition_message.pb.s: facial_recognition_message.pb.cc.s + +.PHONY : facial_recognition_message.pb.s + +# target to generate assembly for a file +facial_recognition_message.pb.cc.s: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/facial_recognition_message.pb.cc.s +.PHONY : facial_recognition_message.pb.cc.s + +main.o: main.cpp.o + +.PHONY : main.o + +# target to build an object file +main.cpp.o: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/main.cpp.o +.PHONY : main.cpp.o + +main.i: main.cpp.i + +.PHONY : main.i + +# target to preprocess a source file +main.cpp.i: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/main.cpp.i +.PHONY : main.cpp.i + +main.s: main.cpp.s + +.PHONY : main.s + +# target to generate assembly for a file +main.cpp.s: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/main.cpp.s +.PHONY : main.cpp.s + +mind_camera.o: mind_camera.cpp.o + +.PHONY : mind_camera.o + +# target to build an object file +mind_camera.cpp.o: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/mind_camera.cpp.o +.PHONY : mind_camera.cpp.o + +mind_camera.i: mind_camera.cpp.i + +.PHONY : mind_camera.i + +# target to preprocess a source file +mind_camera.cpp.i: + $(MAKE) -f CMakeFiles/main.dir/build.make 
CMakeFiles/main.dir/mind_camera.cpp.i +.PHONY : mind_camera.cpp.i + +mind_camera.s: mind_camera.cpp.s + +.PHONY : mind_camera.s + +# target to generate assembly for a file +mind_camera.cpp.s: + $(MAKE) -f CMakeFiles/main.dir/build.make CMakeFiles/main.dir/mind_camera.cpp.s +.PHONY : mind_camera.cpp.s + +# Help Target +help: + @echo "The following are some of the valid targets for this Makefile:" + @echo "... all (the default if no target is provided)" + @echo "... clean" + @echo "... depend" + @echo "... install/strip" + @echo "... edit_cache" + @echo "... main" + @echo "... rebuild_cache" + @echo "... list_install_components" + @echo "... install/local" + @echo "... install" + @echo "... face_detection.o" + @echo "... face_detection.i" + @echo "... face_detection.s" + @echo "... face_feature_mask.o" + @echo "... face_feature_mask.i" + @echo "... face_feature_mask.s" + @echo "... face_post_process.o" + @echo "... face_post_process.i" + @echo "... face_post_process.s" + @echo "... face_recognition.o" + @echo "... face_recognition.i" + @echo "... face_recognition.s" + @echo "... face_register.o" + @echo "... face_register.i" + @echo "... face_register.s" + @echo "... facial_recognition_message.pb.o" + @echo "... facial_recognition_message.pb.i" + @echo "... facial_recognition_message.pb.s" + @echo "... main.o" + @echo "... main.i" + @echo "... main.s" + @echo "... mind_camera.o" + @echo "... mind_camera.i" + @echo "... mind_camera.s" +.PHONY : help + + + +#============================================================================= +# Special targets to cleanup operation of make. + +# Special rule to run CMake to check the build system integrity. +# No rule that depends on this can have commands that come from listfiles +# because they might be regenerated. 
+cmake_check_build_system: + $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 0 +.PHONY : cmake_check_build_system + diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/cmake_install.cmake b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/cmake_install.cmake new file mode 100644 index 0000000000000000000000000000000000000000..33ab6933d23502bb4cd1e7747513afeeff5b48fe --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/cmake_install.cmake @@ -0,0 +1,59 @@ +# Install script for directory: /home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/src + +# Set the install prefix +if(NOT DEFINED CMAKE_INSTALL_PREFIX) + set(CMAKE_INSTALL_PREFIX "/usr/local") +endif() +string(REGEX REPLACE "/$" "" CMAKE_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}") + +# Set the install configuration name. +if(NOT DEFINED CMAKE_INSTALL_CONFIG_NAME) + if(BUILD_TYPE) + string(REGEX REPLACE "^[^A-Za-z0-9_]+" "" + CMAKE_INSTALL_CONFIG_NAME "${BUILD_TYPE}") + else() + set(CMAKE_INSTALL_CONFIG_NAME "") + endif() + message(STATUS "Install configuration: \"${CMAKE_INSTALL_CONFIG_NAME}\"") +endif() + +# Set the component getting installed. +if(NOT CMAKE_INSTALL_COMPONENT) + if(COMPONENT) + message(STATUS "Install component: \"${COMPONENT}\"") + set(CMAKE_INSTALL_COMPONENT "${COMPONENT}") + else() + set(CMAKE_INSTALL_COMPONENT) + endif() +endif() + +# Install shared libraries without execute permission? +if(NOT DEFINED CMAKE_INSTALL_SO_NO_EXE) + set(CMAKE_INSTALL_SO_NO_EXE "1") +endif() + +# Is this installation the result of a crosscompile? 
+if(NOT DEFINED CMAKE_CROSSCOMPILING) + set(CMAKE_CROSSCOMPILING "FALSE") +endif() + +if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT) + file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/../../../out" TYPE EXECUTABLE FILES "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/main") + if(EXISTS "$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/../../../out/main" AND + NOT IS_SYMLINK "$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/../../../out/main") + if(CMAKE_INSTALL_DO_STRIP) + execute_process(COMMAND "/usr/bin/strip" "$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/../../../out/main") + endif() + endif() +endif() + +if(CMAKE_INSTALL_COMPONENT) + set(CMAKE_INSTALL_MANIFEST "install_manifest_${CMAKE_INSTALL_COMPONENT}.txt") +else() + set(CMAKE_INSTALL_MANIFEST "install_manifest.txt") +endif() + +string(REPLACE ";" "\n" CMAKE_INSTALL_MANIFEST_CONTENT + "${CMAKE_INSTALL_MANIFEST_FILES}") +file(WRITE "/home/ascend/work/samples/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/build/intermediates/host/${CMAKE_INSTALL_MANIFEST}" + "${CMAKE_INSTALL_MANIFEST_CONTENT}") diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/data/.keep b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/data/.keep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_detection.h b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_detection.h new file mode 100644 index 0000000000000000000000000000000000000000..72cb6d2674df02f8eca0014d3c47dc2b9412c97c --- /dev/null +++ 
b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_detection.h @@ -0,0 +1,123 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018-2020, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#ifndef FACE_DETECTION_ENGINE_H_ +#define FACE_DETECTION_ENGINE_H_ + +#include "atlasutil/atlas_model.h" +#include "atlasutil/dvpp_process.h" + +#include "face_recognition_params.h" +#include "facial_thread_base.h" + +/** + * @brief: Face detection class + */ +class FaceDetection : public FacialThreadBase { +public: + /** + * @brief: constructor + * @param [in]: configFile: App config file + */ + FaceDetection(const std::string& configFile); + + /** + * @brief: destruction function + */ + ~FaceDetection(); + + /** + * @brief: Face detection thread init function + * @param [in]: None + * @return: Init result + * ATLAS_OK: Init success + * ATLAS_ERROR: Init failed, the thread will exit + */ + AtlasError Init(); + + /** + * @brief: The message process entry of face detection thread received + * @param [in]: msgId: The received message id + * @param [in]: msgData: The received message data + * @return: Message process result + * ATLAS_OK: process success + * ATLAS_ERROR: process failed, the thread will exit + */ + AtlasError Process(int msgId, std::shared_ptr msgData); + +private: + /** + * @brief: Detect face data in image from MSG_FRAME_DATA and + * MSG_FACE_REG_IMAGE message, and send inference data + * to next(face feature mask) thread + * @param [in]: imageHandle: message data + * @return: Message process result + * ATLAS_OK: process success + * ATLAS_ERROR: process failed, the thread will exit + */ + AtlasError FrameDataMsgProcess( + std::shared_ptr imageHandle); + + /** + * @brief: check inference results is valid or not + * @param [in]: resultItem: one face item of inference result + * @return: false:invalid, true: valid + */ + bool IsValidResults(float* resultItem); + + /** + * @brief: Correct box coordinate ratio to [0.0, 1.0] + * @param [in]: ratio: coordinate ratio + * @return: ratio in [0.0, 1.0], otherwise + * when ratio less than zero, then return zero + * when ratio 
more than one, then return one + */ + float CorrectionRatio(float ratio); + + /** + * @brief: Model inference result process, parse face position data + * @param [in/out]: recogInfo: engine transform image + * @param [in]: inferenceOutput: model inference output + * @return: ATLAS_OK: process success + * ATLAS_ERROR: process failed + */ + AtlasError PostProcess( + std::shared_ptr recogInfo, + std::vector& inferenceOutput); + +private: + AtlasModel model_; + DvppProcess dvpp_; +}; + +#endif /* FACE_DETECTION_ENGINE_H_ */ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_feature_mask.h b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_feature_mask.h new file mode 100644 index 0000000000000000000000000000000000000000..e134fe81c00b0b6df3a543a456a1bfb4d94500de --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_feature_mask.h @@ -0,0 +1,252 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018-2020, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * ============================================================================ + */ +#ifndef FACE_FEATURE_MASK_ENGINE_H_ +#define FACE_FEATURE_MASK_ENGINE_H_ + +#include "face_recognition_params.h" +#include +#include +#include +#include +#include +#include +#include + +#include "opencv2/opencv.hpp" +#include "opencv2/imgproc.hpp" +#include "opencv2/core/types_c.h" + +#include "atlasutil/atlas_model.h" +#include "atlasutil/dvpp_process.h" + +#include "facial_thread_base.h" + +//Define the face feature position +enum FaceFeaturePos { + kLeftEyeX, + kLeftEyeY, + kRightEyeX, + kRightEyeY, + kNoseX, + kNoseY, + kLeftMouthX, + kLeftMouthY, + kRightMouthX, + kRightMouthY +}; + +/** + * @brief: Face feature extract class + */ +class FaceFeatureMask : public FacialThreadBase { +public: + /** + * @brief: constructor + * @param [in]: configFile: App config file + */ + FaceFeatureMask(const std::string& configFile); + + /** + * @brief: destruction function + */ + ~FaceFeatureMask(); + + /** + * @brief: Face feature mask thread init function + * @param [in]: None + * @return: Init result + * ATLAS_OK: Init success + * ATLAS_ERROR: Init failed. 
The thread will exit + */ + AtlasError Init(); + + /** + * @brief: The message process entry of face feature mask thread received + * @param [in]: msgId: The received message id + * @param [in]: msgData: The received message data + * @return: Message process result + * ATLAS_OK: process success + * ATLAS_ERROR: process failed, the thread will exit + */ + AtlasError Process(int msgId, std::shared_ptr msgData); + +private: + /** + * @brief: Init the normlized mean and std value + * @param [in]: None + * @return: Whether init success + */ + AtlasError InitNormlizedData(); + + /** + * @brief: Crop all faces area from original image + * @param [out]: faceImgs: all faces area image data + * @param [in]: orgImg: the original image data + * @return: ATLAS_OK: crop success + * ATLAS_ERROR: crop failed + */ + AtlasError Crop(std::vector &faceImgs, ImageData &orgImg); + + /** + * @brief: Resize all cropped image + * @param [out]: resizedImgs: all face image after resize + * @param [in]: faceImgs: all cropped face image + * @return: ATLAS_OK: resize success + * ATLAS_ERROR: resize failed + */ + AtlasError Resize(std::vector &resizedImgs, + std::vector &faceImgs); + + /** + * @brief: Transform the image from resized YUV images by dvpp + * to opencv BGR mat + * @param [out]: bgrImgs BGR images after transform + * @param [in]: resizedImgs resized YUV images by dvpp + * @return: ATLAS_OK: transform success + * ATLAS_ERROR: transform failed + */ + AtlasError Decode(std::vector &bgrImgs, + std::vector &resizedImgs); + + /** + * @brief: Transform the image from (0,255) to little number, + * invoke the opencv's interface to do the normalization, + * sub mean and divide std. + * @param [in/out]: bgrImage The BGR image data + * @return: ATLAS_OK: normalize success + * ATLAS_ERROR: normalize failed + */ + AtlasError Normalize(std::vector &bgrImage); + + /** + * @brief: Copy one batch preprocessed images to + * device buffer for inference later. 
+ * @param [out]: buffer: device buffer for store preprocess images + * @param [in]: bufferSize: device buffer size + * @param [in]: images: preprocessed images data + * @param [in]: batchIdx: current batch index + * @return: > 0: image number of copped + * -1: copy failed + */ + int CopyOneBatchImages(uint8_t* buffer, uint32_t bufferSize, + std::vector& images, int batchIdx); + + /** + * @brief: Copy one image to device buffer with NCWH. + * @param [out]: buffer: device buffer for store preprocess images + * @param [in]: bufferSize: device buffer size + * @param [in]: image: the preprocessed image to copy + * @return: > 0: data size of copped + * -1: copy failed + */ + int CopyImageMatData(uint8_t* buffer, uint32_t bufferSize, cv::Mat& image); + + /** + * @brief: Resize rgb opencv mat image data + * @param [out]: dest: the image data after resize + * @param [in]: src: origin imaga data + * @param [in]: width: resize width + * @param [in]: height: resize height + * @return: None + */ + void BgrResize(cv::Mat& dest, cv::Mat& src, + uint32_t width, uint32_t height); + + /** + * @brief: Inference the preprocessed face images data + * @param [in]: normalizedImgs: the preprocessed image data + * @param [in]: faceImgs: the face images data cropped + * from origin image + * @return: ATLAS_OK: normalize success + * ATLAS_ERROR: normalize failed + */ + AtlasError Inference(std::vector &normalizedImgs, + std::vector &faceImgs); + + /** + * @brief: Parse face feature point coordinate from model + * inference output data + * @param [out]: faceFeature: face feature point coordinate + * @param [in]: facePosition: model inference output data + * @return: None + */ + void ParseFacePosition(FaceFeature *faceFeature, int *facePosition); + + /** + * @brief: Preprocess image for face feature inference, include crop + * faces in image, resize the cropped image, transform to bgr, + * normalinze, and copy to acl device + * @param [out]: processedImgs: preprocessed face images data + * 
@param [in]: recogInfo: message data from last(face detection) thread + * @return: None + */ + AtlasError PreProcess(std::vector& processedImgs, + std::shared_ptr &recogInfo); + + + /** + * @brief: Process model inference output of one batch face images + * @param [in]: inferenceOutput: model inference output + * @param [in]: startIdx: begin index in the faceImgs + * @param [in]: faceCnt: inference image number + * @param [in]: faceImgs: face images + */ + AtlasError PostProcess(std::vector& inferenceOutput, + int startIdx, int faceCnt, + std::vector& faceImgs); + + /** + * @brief: Process MSG_FACE_DETECT_DATA message + * @param [in]: recogInfo: message data + * @return: Message process result + * ATLAS_OK: process success + * ATLAS_ERROR: process failed, the thread will exit + */ + AtlasError FaceDetectMsgProcess( + shared_ptr recogInfo); + + +private: + AtlasModel model_; + DvppProcess dvpp_; + int32_t batchSize_; + // Mean value after trained + cv::Mat trainMean_; + // Std value after trained + cv::Mat trainStd_; + uint32_t inputSize_; + uint8_t* inputBuf_; + int *facePositionBuf_; +}; + +#endif diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_feature_train_mean.h b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_feature_train_mean.h new file mode 100644 index 0000000000000000000000000000000000000000..943f4d06e0f376dff83c8255fd505051f109c88a --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_feature_train_mean.h @@ -0,0 +1,156 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. 
+* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +#ifndef FACE_FEATURE_MASK_MEAN_H_ +#define FACE_FEATURE_MASK_MEAN_H_ +const float kTrainMean[4800] = { + 69, 81, 98, 69, 83, 101, 71, 85, 105, 73, 88, 111, 76, 92, 118, 80, 98, 126, 84, 103, 134, 89, 109, 142, 93, 114, 149, 98, 119, 156, 102, 124, 162, 107, 129, 168, 111, + 133, 173, 115, 137, 177, 118, 140, 181, 121, 143, 183, 123, 144, 185, 124, 146, 187, 125, 147, 187, 126, 147, 188, 126, 147, 188, 126, 147, 188, 125, 146, 187, 124, 145, + 186, 122, 144, 185, 120, 142, 183, 117, 139, 180, 114, 136, 176, 110, 132, 172, 106, 128, 167, 101, 123, 161, 97, 118, 154, 92, 113, 147, 87, 107, 139, 83, 101, 131, 78, + 96, 123, 74, 90, 114, 70, 85, 107, 68, 82, 101, 68, 80, 97, 69, 81, 98, 69, 83, 102, 71, 85, 107, 73, 89, 113, 77, 94, 121, 81, 100, 129, 86, 106, 137, 91, 111, 145, 95, + 116, 152, 100, 122, 159, 105, 127, 166, 110, 131, 171, 114, 136, 176, 118, 139, 180, 121, 143, 184, 124, 145, 187, 126, 147, 189, 127, 149, 190, 128, 149, 191, 129, 150, + 191, 129, 150, 191, 128, 149, 191, 128, 149, 191, 126, 148, 190, 125, 146, 188, 123, 144, 186, 120, 142, 183, 116, 138, 180, 112, 135, 175, 108, 131, 170, 104, 126, 165, + 99, 121, 158, 94, 115, 151, 89, 110, 143, 84, 104, 135, 80, 98, 126, 75, 92, 117, 71, 86, 109, 69, 83, 102, 68, 81, 98, 69, 82, 99, 70, 83, 103, 71, 86, 108, 74, 90, 115, + 78, 96, 123, 83, 101, 132, 88, 107, 140, 92, 113, 148, 97, 118, 155, 102, 123, 161, 107, 128, 168, 111, 133, 173, 115, 137, 178, 119, 141, 182, 123, 144, 186, 125, 147, + 189, 128, 149, 191, 129, 151, 192, 130, 151, 193, 131, 152, 194, 131, 152, 194, 130, 152, 194, 130, 
151, 193, 128, 150, 192, 127, 148, 190, 124, 146, 188, 122, 143, 185, + 118, 140, 181, 114, 136, 177, 110, 132, 172, 106, 128, 167, 101, 123, 160, 96, 117, 154, 91, 112, 146, 86, 106, 137, 81, 100, 129, 77, 94, 120, 72, 88, 111, 69, 83, 104, + 68, 81, 99, 69, 82, 100, 70, 84, 104, 72, 87, 110, 75, 91, 117, 79, 97, 125, 84, 103, 134, 89, 109, 142, 94, 114, 149, 98, 119, 156, 102, 124, 162, 107, 129, 168, 111, 133, + 173, 115, 137, 178, 119, 141, 182, 123, 144, 186, 126, 147, 189, 128, 149, 191, 130, 151, 193, 131, 152, 194, 132, 153, 195, 132, 153, 195, 131, 153, 195, 131, 152, 194, + 129, 150, 193, 127, 148, 190, 125, 146, 188, 121, 143, 184, 118, 139, 180, 114, 136, 176, 110, 132, 172, 106, 128, 167, 102, 123, 161, 97, 118, 155, 92, 113, 147, 87, 107, + 139, 82, 101, 131, 77, 95, 122, 73, 89, 114, 70, 84, 106, 68, 82, 101, 69, 82, 101, 70, 84, 105, 72, 88, 111, 76, 92, 118, 80, 98, 127, 85, 104, 135, 89, 109, 143, 94, 115, + 150, 98, 119, 156, 102, 123, 161, 106, 128, 166, 110, 131, 171, 114, 135, 175, 117, 138, 179, 121, 142, 183, 124, 145, 187, 127, 148, 190, 129, 150, 193, 131, 152, 195, + 132, 153, 196, 132, 153, 196, 131, 152, 195, 130, 151, 194, 128, 149, 191, 125, 146, 188, 122, 143, 185, 119, 140, 181, 116, 137, 177, 112, 133, 173, 109, 130, 169, 105, + 126, 165, 101, 122, 160, 97, 118, 154, 92, 113, 148, 88, 108, 141, 83, 102, 133, 78, 96, 124, 74, 91, 116, 71, 86, 108, 69, 82, 102, 69, 83, 102, 71, 85, 107, 73, 89, 113, + 76, 93, 120, 81, 99, 128, 85, 105, 136, 90, 110, 143, 94, 114, 149, 97, 118, 154, 101, 121, 158, 104, 125, 163, 107, 128, 166, 110, 130, 170, 113, 133, 173, 116, 137, 178, + 120, 141, 182, 124, 144, 186, 127, 148, 190, 129, 150, 193, 130, 151, 194, 131, 151, 195, 130, 151, 194, 128, 149, 191, 125, 146, 188, 122, 142, 184, 118, 138, 179, 115, + 135, 175, 111, 131, 171, 108, 129, 168, 106, 126, 164, 103, 123, 161, 100, 120, 157, 96, 117, 153, 92, 113, 148, 88, 108, 141, 84, 103, 134, 79, 97, 126, 75, 92, 118, 71, + 87, 110, 69, 83, 104, 
70, 84, 103, 72, 86, 108, 74, 90, 115, 77, 95, 122, 82, 100, 129, 86, 105, 136, 89, 109, 142, 92, 112, 147, 95, 115, 151, 98, 118, 154, 100, 120, 157, + 103, 122, 160, 105, 124, 163, 107, 127, 166, 110, 130, 170, 114, 134, 175, 118, 139, 180, 122, 143, 185, 125, 146, 189, 127, 148, 192, 128, 149, 192, 126, 147, 190, 123, + 144, 187, 120, 140, 182, 115, 136, 177, 111, 131, 172, 108, 128, 167, 105, 125, 163, 103, 122, 160, 101, 120, 158, 99, 119, 156, 97, 117, 154, 95, 115, 150, 92, 112, 146, + 88, 108, 141, 84, 103, 135, 80, 98, 127, 76, 93, 120, 73, 88, 112, 70, 84, 105, 71, 84, 105, 72, 87, 110, 75, 91, 117, 78, 96, 123, 82, 100, 130, 85, 104, 136, 88, 107, + 140, 90, 110, 144, 92, 111, 146, 94, 113, 148, 96, 114, 150, 97, 116, 152, 99, 117, 154, 101, 119, 157, 103, 122, 161, 106, 126, 166, 111, 131, 172, 116, 136, 179, 120, + 141, 184, 123, 144, 188, 124, 145, 188, 122, 143, 186, 118, 139, 182, 113, 133, 175, 108, 128, 169, 104, 123, 163, 101, 119, 158, 98, 117, 154, 97, 115, 151, 96, 114, 150, + 95, 113, 149, 94, 113, 148, 92, 112, 147, 90, 110, 144, 87, 107, 140, 84, 103, 135, 81, 99, 129, 78, 95, 122, 74, 90, 115, 71, 85, 107, 71, 85, 107, 74, 89, 112, 77, 93, + 119, 80, 97, 125, 83, 101, 131, 85, 104, 136, 87, 106, 139, 88, 107, 141, 89, 107, 141, 90, 107, 141, 90, 108, 142, 91, 108, 142, 92, 109, 144, 94, 111, 147, 96, 114, 151, + 99, 118, 157, 103, 123, 164, 108, 129, 171, 114, 135, 178, 118, 140, 183, 119, 141, 184, 117, 138, 182, 112, 133, 176, 106, 126, 168, 101, 120, 160, 97, 115, 154, 94, 111, + 148, 91, 108, 144, 91, 107, 142, 90, 107, 141, 89, 107, 141, 89, 107, 141, 89, 107, 142, 88, 107, 141, 86, 106, 139, 84, 103, 135, 82, 100, 130, 79, 97, 124, 76, 92, 117, + 73, 87, 110, 73, 87, 109, 75, 91, 115, 78, 94, 121, 81, 99, 127, 84, 102, 133, 85, 104, 136, 86, 104, 137, 86, 104, 138, 86, 103, 137, 86, 102, 136, 86, 102, 135, 86, 102, + 134, 87, 102, 136, 88, 104, 139, 90, 107, 143, 93, 111, 149, 97, 116, 156, 103, 123, 165, 109, 130, 173, 114, 136, 
179, 116, 137, 181, 113, 134, 177, 107, 128, 170, 100, + 120, 162, 95, 114, 153, 91, 109, 147, 88, 105, 141, 86, 102, 136, 85, 100, 133, 85, 100, 132, 85, 101, 133, 85, 102, 135, 85, 103, 137, 85, 104, 138, 86, 105, 138, 85, 104, + 136, 83, 102, 132, 81, 98, 127, 77, 94, 120, 74, 89, 113, 74, 89, 112, 77, 92, 118, 80, 96, 124, 83, 100, 130, 85, 104, 135, 86, 105, 137, 85, 104, 137, 85, 103, 136, 84, + 101, 134, 83, 99, 132, 83, 98, 130, 83, 97, 129, 83, 97, 130, 84, 99, 132, 86, 102, 138, 88, 106, 144, 92, 111, 151, 98, 119, 160, 106, 127, 170, 111, 133, 177, 113, 135, + 179, 110, 132, 175, 104, 125, 167, 96, 116, 158, 91, 109, 149, 87, 104, 141, 84, 100, 135, 82, 97, 130, 81, 95, 127, 81, 95, 126, 81, 96, 128, 83, 99, 132, 84, 101, 135, + 85, 103, 137, 85, 104, 138, 85, 105, 137, 84, 103, 135, 82, 100, 130, 79, 96, 123, 76, 92, 117, 76, 91, 115, 78, 94, 121, 82, 99, 127, 85, 103, 133, 87, 106, 138, 88, 106, + 140, 86, 105, 139, 85, 103, 137, 84, 100, 134, 82, 97, 130, 81, 96, 127, 81, 95, 126, 82, 95, 127, 83, 97, 131, 86, 101, 136, 88, 105, 143, 91, 110, 150, 97, 117, 159, 104, + 126, 169, 110, 132, 176, 113, 135, 179, 110, 131, 175, 103, 124, 167, 96, 115, 157, 90, 108, 148, 86, 103, 141, 84, 99, 134, 83, 96, 130, 81, 94, 126, 80, 94, 125, 80, 95, + 127, 82, 98, 131, 83, 100, 135, 85, 104, 138, 87, 106, 140, 87, 107, 140, 86, 105, 138, 84, 103, 133, 81, 98, 127, 78, 94, 120, 77, 93, 118, 80, 97, 124, 84, 101, 130, 87, + 105, 136, 89, 108, 141, 90, 109, 143, 89, 108, 143, 88, 106, 141, 86, 103, 137, 85, 100, 134, 84, 98, 131, 83, 97, 129, 84, 97, 130, 85, 99, 134, 87, 103, 139, 90, 107, + 145, 93, 111, 152, 98, 119, 161, 105, 126, 170, 111, 133, 177, 114, 136, 180, 111, 133, 177, 105, 126, 169, 98, 118, 160, 92, 111, 152, 89, 106, 145, 87, 102, 138, 86, 100, + 134, 84, 98, 131, 83, 97, 130, 84, 98, 131, 85, 101, 135, 86, 104, 139, 88, 107, 142, 90, 109, 145, 90, 110, 144, 89, 108, 141, 87, 105, 137, 83, 101, 130, 80, 96, 123, 79, + 94, 120, 82, 98, 126, 85, 103, 
133, 89, 108, 140, 92, 111, 145, 94, 113, 148, 93, 112, 148, 93, 111, 148, 92, 109, 145, 91, 107, 142, 90, 105, 140, 89, 104, 138, 89, 104, + 138, 90, 105, 141, 92, 108, 145, 94, 111, 151, 96, 115, 157, 102, 122, 165, 108, 129, 173, 113, 135, 180, 116, 137, 183, 113, 135, 180, 108, 129, 174, 102, 122, 166, 97, + 116, 158, 95, 112, 152, 93, 109, 146, 91, 106, 142, 90, 104, 139, 90, 104, 139, 90, 105, 141, 91, 108, 144, 92, 110, 147, 93, 112, 149, 94, 114, 151, 94, 114, 150, 92, 112, + 146, 89, 108, 140, 85, 103, 133, 81, 98, 126, 80, 96, 122, 83, 100, 129, 87, 105, 136, 91, 110, 143, 95, 114, 149, 97, 117, 153, 98, 118, 155, 98, 118, 156, 98, 116, 154, + 98, 115, 153, 97, 113, 151, 97, 112, 150, 97, 113, 150, 97, 113, 151, 98, 115, 154, 100, 118, 159, 102, 121, 164, 106, 126, 170, 111, 132, 178, 116, 138, 184, 118, 140, + 186, 116, 138, 184, 112, 133, 178, 107, 127, 172, 103, 122, 165, 101, 119, 160, 99, 116, 156, 99, 115, 153, 98, 113, 151, 97, 113, 151, 97, 114, 152, 98, 116, 154, 99, 118, + 156, 99, 119, 158, 99, 119, 157, 98, 118, 155, 95, 115, 150, 91, 110, 144, 87, 105, 136, 83, 100, 129, 81, 97, 124, 84, 101, 131, 88, 106, 138, 93, 112, 145, 97, 117, 152, + 100, 120, 158, 103, 123, 161, 104, 124, 164, 105, 124, 164, 106, 124, 164, 106, 123, 164, 106, 123, 163, 106, 123, 163, 105, 123, 163, 105, 123, 165, 105, 124, 167, 107, + 127, 171, 110, 131, 176, 115, 136, 183, 119, 140, 188, 121, 142, 189, 119, 140, 188, 115, 137, 183, 111, 132, 178, 108, 128, 173, 106, 125, 169, 106, 124, 166, 106, 123, + 164, 106, 123, 164, 106, 123, 164, 106, 124, 165, 106, 125, 166, 106, 126, 166, 105, 126, 166, 104, 124, 163, 101, 121, 159, 97, 117, 153, 93, 112, 146, 88, 107, 138, 84, + 101, 131, 82, 98, 126, 85, 102, 132, 89, 107, 139, 94, 113, 147, 99, 118, 155, 103, 123, 161, 106, 127, 167, 109, 129, 171, 111, 131, 173, 112, 132, 174, 113, 132, 174, + 113, 131, 174, 113, 131, 174, 112, 131, 174, 111, 130, 174, 110, 130, 175, 111, 132, 177, 114, 135, 181, 118, 139, 186, 121, 143, 
191, 123, 144, 192, 121, 143, 191, 118, + 140, 187, 115, 136, 183, 112, 133, 179, 112, 132, 177, 112, 131, 175, 113, 132, 175, 114, 132, 175, 114, 132, 175, 114, 133, 176, 114, 133, 176, 112, 132, 175, 110, 131, + 173, 107, 128, 169, 103, 123, 162, 98, 118, 155, 94, 113, 148, 89, 107, 140, 85, 102, 132, 83, 99, 126, 86, 102, 132, 89, 107, 139, 94, 113, 148, 99, 119, 156, 104, 124, + 163, 109, 129, 170, 113, 133, 175, 116, 136, 179, 118, 137, 182, 119, 138, 183, 119, 138, 183, 119, 138, 183, 117, 136, 182, 115, 135, 181, 114, 134, 180, 114, 135, 182, + 116, 137, 185, 120, 141, 189, 123, 144, 193, 124, 145, 194, 123, 144, 193, 120, 141, 190, 117, 138, 186, 115, 136, 183, 115, 136, 182, 117, 136, 182, 118, 138, 183, 119, + 139, 184, 120, 139, 184, 120, 140, 184, 119, 139, 184, 117, 137, 182, 114, 134, 178, 110, 130, 172, 104, 125, 165, 99, 119, 157, 94, 113, 148, 89, 107, 140, 85, 103, 133, + 83, 99, 126, 85, 102, 132, 89, 106, 139, 93, 112, 147, 99, 118, 156, 104, 124, 164, 109, 130, 172, 114, 135, 178, 118, 138, 183, 121, 141, 187, 123, 142, 189, 123, 142, + 189, 122, 142, 188, 120, 140, 187, 118, 138, 185, 116, 136, 184, 116, 136, 184, 117, 138, 186, 121, 141, 190, 123, 144, 193, 124, 145, 194, 123, 144, 193, 120, 141, 190, + 118, 138, 187, 116, 137, 185, 116, 137, 184, 118, 138, 186, 121, 141, 188, 123, 143, 189, 124, 144, 190, 124, 144, 190, 122, 142, 189, 119, 140, 185, 115, 136, 180, 110, + 131, 173, 104, 124, 165, 98, 118, 156, 93, 112, 148, 88, 107, 139, 85, 102, 133, 83, 99, 126, 85, 102, 132, 88, 106, 138, 92, 111, 146, 98, 117, 155, 103, 123, 163, 109, + 129, 171, 114, 135, 179, 119, 139, 185, 122, 143, 189, 124, 144, 192, 124, 144, 192, 123, 143, 191, 121, 141, 189, 118, 138, 186, 115, 136, 184, 115, 135, 184, 116, 136, + 186, 119, 139, 188, 122, 141, 191, 123, 142, 192, 121, 141, 191, 119, 139, 188, 116, 136, 186, 115, 135, 184, 115, 135, 184, 117, 138, 186, 121, 141, 189, 124, 144, 192, + 125, 145, 193, 125, 145, 193, 123, 143, 191, 120, 140, 187, 115, 
136, 181, 109, 130, 173, 103, 123, 164, 97, 117, 155, 92, 111, 147, 88, 106, 138, 85, 102, 132, 83, 99, + 126, 85, 102, 131, 88, 105, 137, 91, 110, 144, 96, 115, 153, 101, 121, 161, 107, 127, 170, 113, 133, 178, 118, 138, 184, 121, 142, 189, 123, 143, 192, 124, 144, 192, 122, + 142, 191, 120, 140, 188, 116, 136, 185, 113, 133, 182, 112, 132, 181, 113, 132, 182, 116, 135, 184, 118, 137, 186, 118, 137, 187, 117, 136, 186, 115, 134, 184, 113, 132, + 182, 112, 132, 181, 113, 132, 182, 115, 135, 184, 119, 139, 187, 122, 142, 191, 124, 144, 192, 124, 144, 192, 122, 142, 190, 118, 139, 186, 113, 134, 179, 107, 128, 171, + 101, 121, 162, 96, 115, 153, 91, 110, 145, 87, 105, 137, 85, 101, 131, 83, 98, 125, 85, 101, 130, 87, 104, 135, 90, 108, 142, 94, 113, 150, 99, 119, 159, 105, 125, 167, + 110, 130, 175, 115, 135, 182, 119, 139, 187, 121, 141, 190, 121, 142, 191, 120, 140, 189, 117, 137, 186, 114, 133, 182, 111, 130, 179, 109, 128, 177, 109, 128, 177, 110, + 129, 178, 112, 130, 179, 113, 131, 180, 112, 130, 179, 110, 129, 178, 109, 128, 177, 109, 128, 177, 110, 129, 178, 112, 132, 181, 116, 135, 184, 119, 139, 188, 121, 141, + 190, 121, 141, 190, 119, 139, 188, 115, 136, 183, 110, 131, 177, 105, 125, 168, 99, 119, 159, 94, 113, 151, 89, 108, 143, 86, 104, 136, 84, 101, 130, 83, 98, 124, 84, 100, + 128, 86, 103, 134, 89, 107, 140, 93, 112, 148, 97, 117, 156, 102, 122, 164, 107, 127, 172, 112, 132, 179, 115, 136, 184, 118, 138, 187, 118, 138, 188, 117, 137, 187, 115, + 134, 184, 111, 131, 180, 108, 127, 176, 106, 124, 173, 105, 123, 172, 105, 123, 172, 106, 124, 172, 107, 124, 173, 106, 124, 172, 105, 123, 172, 104, 123, 171, 105, 123, + 172, 107, 126, 175, 110, 129, 178, 113, 133, 181, 115, 136, 185, 117, 137, 186, 117, 137, 186, 115, 136, 184, 112, 132, 180, 107, 128, 173, 102, 122, 165, 96, 116, 157, 92, + 111, 148, 88, 106, 140, 85, 103, 134, 84, 100, 128, 83, 98, 123, 84, 100, 127, 86, 102, 132, 88, 106, 138, 92, 110, 146, 96, 115, 153, 100, 120, 161, 104, 124, 168, 
108, + 128, 174, 111, 132, 179, 113, 134, 182, 114, 135, 183, 114, 134, 183, 112, 132, 181, 110, 129, 177, 107, 125, 174, 104, 122, 171, 103, 120, 168, 102, 119, 167, 102, 119, + 167, 102, 119, 167, 102, 119, 167, 102, 119, 167, 102, 120, 167, 103, 121, 169, 105, 124, 172, 108, 127, 175, 110, 130, 178, 112, 132, 181, 113, 133, 182, 112, 133, 181, + 111, 132, 180, 108, 129, 175, 104, 124, 169, 99, 119, 162, 94, 114, 153, 90, 110, 146, 87, 105, 139, 84, 102, 132, 83, 99, 127, 83, 98, 122, 84, 99, 125, 85, 102, 130, 87, + 104, 136, 90, 109, 143, 94, 113, 150, 97, 117, 157, 101, 121, 164, 104, 125, 170, 107, 128, 175, 109, 130, 178, 110, 131, 179, 110, 131, 178, 109, 129, 177, 108, 127, 175, + 106, 125, 172, 104, 122, 169, 103, 120, 167, 102, 118, 166, 101, 118, 165, 101, 118, 165, 101, 118, 165, 101, 118, 165, 102, 119, 166, 103, 121, 168, 105, 123, 170, 107, + 126, 173, 108, 128, 175, 109, 129, 177, 109, 129, 177, 108, 129, 177, 107, 128, 175, 104, 126, 171, 101, 122, 165, 96, 117, 158, 92, 112, 151, 89, 108, 143, 86, 104, 136, + 84, 101, 130, 83, 98, 126, 82, 97, 120, 83, 98, 124, 85, 100, 128, 87, 103, 134, 89, 107, 141, 92, 111, 148, 95, 115, 154, 98, 119, 160, 101, 122, 166, 104, 125, 170, 105, + 126, 173, 106, 127, 174, 106, 127, 173, 106, 126, 173, 106, 125, 172, 106, 124, 170, 105, 122, 169, 104, 121, 167, 103, 119, 166, 103, 119, 165, 103, 118, 165, 103, 118, + 165, 103, 119, 165, 103, 120, 166, 104, 121, 167, 104, 123, 169, 105, 124, 170, 106, 125, 171, 105, 126, 172, 105, 126, 172, 104, 126, 172, 103, 125, 170, 101, 123, 167, + 98, 119, 161, 94, 115, 155, 91, 111, 148, 88, 107, 141, 85, 103, 134, 83, 100, 129, 82, 98, 124, 82, 96, 119, 83, 97, 122, 84, 100, 127, 86, 103, 132, 89, 106, 138, 91, + 110, 145, 94, 114, 151, 96, 117, 157, 99, 120, 163, 101, 122, 166, 102, 123, 168, 102, 123, 169, 102, 123, 168, 103, 122, 168, 103, 122, 167, 104, 121, 167, 104, 121, 167, + 104, 120, 166, 104, 119, 166, 104, 119, 166, 104, 119, 166, 104, 119, 165, 103, 119, 165, 
103, 119, 165, 103, 119, 165, 103, 120, 166, 103, 121, 166, 102, 121, 166, 102, + 122, 167, 102, 122, 168, 101, 123, 168, 101, 122, 167, 99, 121, 163, 96, 117, 158, 93, 113, 152, 90, 109, 145, 87, 106, 139, 85, 102, 133, 83, 99, 127, 82, 97, 123, 82, 95, + 117, 82, 96, 120, 84, 99, 124, 85, 102, 130, 88, 105, 136, 91, 109, 143, 93, 113, 149, 96, 116, 155, 98, 119, 160, 99, 121, 163, 100, 121, 165, 99, 120, 165, 99, 119, 164, + 99, 118, 163, 99, 117, 162, 100, 117, 163, 101, 117, 163, 102, 117, 164, 103, 117, 164, 103, 117, 165, 103, 117, 165, 103, 117, 164, 102, 117, 164, 101, 116, 163, 100, 115, + 162, 99, 115, 161, 98, 115, 160, 98, 116, 161, 98, 118, 162, 99, 119, 163, 99, 121, 164, 99, 121, 163, 97, 119, 161, 95, 116, 155, 92, 112, 149, 89, 108, 143, 87, 105, 137, + 85, 102, 131, 83, 99, 125, 82, 97, 121, 82, 95, 116, 82, 96, 119, 83, 98, 123, 85, 101, 128, 87, 104, 134, 90, 108, 140, 92, 112, 147, 95, 115, 152, 97, 118, 157, 98, 120, + 161, 98, 120, 162, 98, 119, 162, 97, 116, 160, 95, 114, 158, 95, 112, 157, 96, 112, 157, 97, 112, 158, 99, 113, 160, 101, 114, 162, 102, 114, 163, 102, 115, 163, 102, 114, + 163, 101, 113, 161, 99, 112, 160, 97, 111, 158, 95, 110, 156, 94, 111, 156, 94, 112, 157, 95, 115, 158, 97, 117, 160, 98, 119, 162, 98, 119, 161, 96, 118, 158, 94, 115, + 153, 91, 111, 147, 88, 107, 140, 86, 104, 134, 84, 101, 129, 83, 98, 124, 82, 96, 120, 81, 94, 115, 82, 95, 117, 82, 97, 121, 84, 99, 125, 86, 103, 131, 89, 106, 137, 91, + 110, 144, 94, 114, 150, 96, 117, 155, 98, 119, 159, 98, 119, 161, 97, 118, 160, 95, 115, 159, 94, 112, 156, 92, 109, 154, 92, 107, 154, 94, 108, 155, 96, 109, 157, 99, 110, + 159, 100, 111, 160, 101, 112, 161, 100, 112, 161, 99, 111, 160, 97, 109, 158, 95, 108, 156, 93, 107, 154, 93, 108, 154, 93, 111, 155, 94, 114, 157, 96, 117, 159, 97, 118, + 160, 97, 118, 159, 95, 117, 155, 93, 113, 150, 90, 109, 144, 87, 105, 138, 85, 102, 132, 83, 99, 126, 82, 97, 122, 81, 95, 118, 81, 93, 113, 81, 94, 116, 82, 96, 119, 83, + 98, 
123, 85, 101, 128, 87, 104, 134, 90, 108, 141, 93, 112, 147, 95, 116, 152, 97, 118, 157, 98, 119, 160, 97, 118, 160, 96, 116, 159, 94, 113, 157, 93, 109, 155, 92, 107, + 154, 94, 107, 155, 96, 107, 157, 98, 109, 159, 99, 110, 160, 100, 110, 161, 100, 110, 161, 98, 109, 159, 96, 108, 157, 94, 107, 156, 93, 107, 155, 93, 108, 155, 94, 111, + 156, 95, 114, 158, 96, 117, 159, 97, 118, 159, 96, 118, 157, 94, 115, 152, 91, 111, 147, 88, 107, 141, 86, 104, 135, 84, 101, 129, 82, 98, 124, 81, 96, 120, 80, 93, 116, + 80, 92, 112, 80, 93, 114, 81, 95, 117, 82, 96, 121, 83, 99, 125, 86, 102, 131, 88, 106, 137, 91, 110, 143, 93, 113, 149, 96, 117, 154, 98, 119, 158, 98, 119, 160, 97, 117, + 161, 96, 115, 160, 95, 112, 159, 94, 109, 158, 95, 108, 158, 97, 108, 159, 98, 109, 161, 100, 110, 162, 100, 110, 162, 100, 110, 162, 99, 109, 161, 97, 108, 159, 96, 108, + 158, 95, 109, 158, 95, 111, 158, 96, 114, 159, 97, 116, 160, 97, 118, 160, 97, 118, 158, 95, 116, 154, 92, 113, 149, 89, 109, 143, 86, 105, 137, 84, 102, 131, 83, 99, 126, + 82, 97, 122, 80, 94, 117, 79, 92, 114, 80, 92, 111, 80, 92, 112, 81, 93, 115, 81, 95, 118, 82, 97, 123, 84, 100, 128, 86, 104, 133, 89, 107, 139, 92, 111, 145, 94, 115, + 151, 97, 118, 156, 98, 119, 159, 98, 119, 161, 98, 118, 162, 98, 115, 162, 97, 113, 162, 97, 112, 162, 98, 111, 163, 99, 111, 164, 101, 112, 165, 101, 112, 165, 101, 112, + 165, 100, 111, 164, 99, 111, 163, 98, 112, 162, 98, 113, 162, 98, 115, 162, 98, 117, 162, 98, 119, 162, 98, 119, 160, 96, 117, 156, 93, 114, 151, 90, 110, 145, 87, 106, + 139, 85, 103, 133, 83, 100, 128, 82, 97, 123, 80, 95, 119, 79, 93, 115, 79, 92, 112, 80, 91, 110, 80, 91, 111, 80, 92, 113, 80, 93, 116, 81, 95, 120, 83, 98, 124, 84, 101, + 129, 86, 104, 135, 89, 108, 141, 92, 112, 147, 95, 115, 152, 97, 118, 157, 99, 119, 160, 100, 120, 163, 100, 119, 164, 100, 117, 165, 100, 116, 165, 100, 115, 166, 101, + 114, 166, 102, 114, 167, 102, 114, 167, 102, 114, 167, 101, 114, 166, 101, 115, 166, 101, 116, 166, 101, 
117, 165, 100, 119, 165, 100, 120, 164, 99, 120, 161, 97, 118, 158, + 95, 115, 153, 91, 111, 147, 88, 107, 141, 85, 104, 135, 83, 100, 130, 82, 98, 125, 80, 95, 121, 79, 93, 117, 79, 92, 114, 79, 91, 111, 80, 91, 109, 80, 91, 110, 79, 91, + 112, 79, 92, 114, 80, 94, 117, 81, 95, 121, 82, 98, 125, 85, 101, 131, 87, 105, 136, 90, 109, 142, 93, 113, 148, 95, 116, 154, 98, 119, 158, 100, 120, 162, 101, 121, 165, + 102, 121, 167, 102, 120, 168, 103, 119, 168, 103, 118, 169, 103, 118, 169, 103, 118, 169, 103, 118, 169, 103, 118, 169, 103, 119, 168, 103, 120, 168, 102, 121, 167, 102, + 121, 166, 100, 121, 163, 98, 119, 159, 96, 116, 155, 93, 113, 149, 89, 109, 143, 86, 104, 136, 83, 101, 131, 82, 98, 126, 80, 96, 122, 80, 94, 118, 79, 92, 115, 78, 91, + 112, 79, 91, 110, 81, 91, 109, 80, 91, 109, 79, 90, 110, 79, 91, 112, 79, 92, 115, 80, 94, 118, 81, 96, 122, 83, 99, 127, 85, 102, 132, 87, 106, 138, 90, 110, 144, 93, 113, + 149, 96, 117, 155, 99, 119, 160, 102, 122, 164, 103, 123, 167, 104, 123, 169, 105, 123, 170, 105, 122, 171, 105, 122, 171, 106, 122, 171, 106, 122, 171, 105, 122, 171, 105, + 122, 170, 105, 123, 169, 104, 123, 168, 102, 122, 165, 100, 120, 161, 97, 117, 156, 94, 114, 151, 90, 110, 144, 87, 106, 138, 84, 101, 132, 82, 98, 127, 80, 96, 123, 79, + 94, 119, 79, 93, 116, 78, 91, 113, 78, 91, 111, 79, 91, 110, 81, 91, 108, 80, 90, 109, 79, 90, 110, 79, 90, 111, 79, 91, 113, 79, 92, 116, 79, 93, 119, 81, 96, 123, 83, 99, + 128, 85, 102, 133, 88, 106, 138, 91, 110, 144, 94, 114, 150, 97, 117, 156, 100, 120, 161, 103, 123, 165, 105, 124, 168, 106, 125, 170, 107, 125, 172, 108, 125, 173, 108, + 125, 173, 108, 125, 173, 108, 125, 172, 107, 125, 171, 106, 125, 169, 104, 123, 166, 101, 121, 162, 98, 118, 157, 95, 115, 152, 91, 111, 146, 88, 106, 139, 84, 102, 133, + 82, 99, 128, 80, 96, 123, 79, 94, 120, 79, 93, 117, 79, 92, 115, 78, 91, 112, 79, 91, 111, 80, 91, 110, 82, 92, 109, 81, 91, 109, 80, 91, 110, 79, 90, 110, 79, 91, 112, 79, + 91, 114, 78, 92, 116, 79, 
94, 120, 81, 96, 124, 83, 99, 128, 85, 102, 133, 88, 106, 139, 91, 110, 145, 94, 114, 151, 98, 117, 156, 101, 121, 161, 104, 123, 166, 106, 125, + 169, 108, 126, 171, 109, 127, 172, 109, 127, 173, 109, 127, 172, 108, 126, 171, 107, 125, 169, 105, 124, 167, 102, 121, 163, 99, 118, 158, 96, 115, 152, 92, 111, 147, 89, + 107, 140, 85, 103, 134, 82, 99, 128, 80, 96, 124, 79, 94, 120, 78, 93, 118, 78, 92, 116, 79, 92, 114, 79, 91, 112, 80, 92, 111, 81, 92, 110, 82, 92, 109, 82, 92, 109, 81, + 91, 110, 80, 91, 110, 79, 90, 111, 78, 90, 113, 78, 91, 114, 78, 92, 117, 79, 94, 120, 81, 96, 124, 82, 99, 128, 84, 102, 133, 87, 105, 138, 91, 109, 144, 94, 113, 150, 97, + 116, 155, 100, 119, 160, 103, 122, 164, 105, 123, 166, 106, 124, 168, 107, 125, 169, 106, 124, 168, 105, 124, 167, 104, 122, 164, 101, 120, 161, 98, 117, 156, 95, 114, 151, + 92, 110, 146, 89, 107, 140, 85, 102, 134, 82, 99, 129, 80, 96, 124, 79, 94, 121, 78, 93, 118, 78, 92, 116, 79, 92, 114, 79, 91, 113, 80, 92, 112, 82, 93, 112, 82, 93, 111, + 83, 93, 109, 83, 92, 110, 82, 92, 110, 81, 91, 111, 80, 91, 112, 79, 91, 112, 78, 91, 113, 78, 91, 115, 79, 93, 118, 79, 94, 121, 80, 96, 124, 82, 98, 127, 84, 100, 132, + 86, 104, 137, 89, 107, 142, 93, 111, 147, 95, 114, 152, 98, 116, 156, 100, 118, 159, 101, 119, 161, 102, 119, 162, 101, 119, 161, 100, 118, 159, 98, 116, 157, 96, 114, 153, + 94, 111, 149, 91, 108, 144, 88, 105, 139, 84, 101, 133, 82, 98, 128, 80, 96, 124, 79, 94, 121, 78, 92, 118, 78, 92, 116, 78, 91, 115, 79, 92, 114, 80, 92, 113, 82, 93, 113, + 83, 94, 112, 84, 94, 112 +}; + +#endif diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_feature_train_std.h b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_feature_train_std.h new file mode 100644 index 0000000000000000000000000000000000000000..591ec55df667686fa5e915565a26e47c6d1eac6b --- /dev/null +++ 
b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_feature_train_std.h @@ -0,0 +1,133 @@ +/** +* Copyright 2020 Huawei Technologies Co., Ltd +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at + +* http://www.apache.org/licenses/LICENSE-2.0 + +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +#ifndef FACE_FEATURE_MASK_STD_H_ +#define FACE_FEATURE_MASK_STD_H_ + +const float kTrainStd[4800] = {58, 61, 66, 57, 59, 65, 56, 58, 65, 54, 58, 65, 53, 57, 65, 53, 56, 65, 53, 56, 65, 53, 56, 65, 53, 55, 64, 52, 55, 63, 52, 54, 62, 52, 53, 60, 52, 53, 59, 52, 52, 57, +51, 51, 56, 51, 51, 55, 51, 51, 54, 51, 50, 54, 51, 50, 53, 51, 50, 53, 50, 50, 53, 51, 50, 53, 51, 50, 53, 51, 50, 53, 51, 50, 54, 51, 50, 54, 51, 51, 55, 51, 52, 57, 52, +52, 58, 52, 53, 59, 52, 54, 61, 53, 55, 62, 53, 56, 64, 53, 56, 65, 53, 56, 65, 53, 57, 65, 53, 57, 65, 54, 58, 65, 55, 58, 65, 57, 59, 65, 58, 60, 66, 57, 59, 65, 55, 58, +65, 54, 57, 65, 53, 56, 65, 53, 56, 64, 53, 55, 64, 52, 55, 63, 52, 54, 62, 52, 54, 61, 52, 53, 59, 51, 52, 58, 51, 51, 56, 50, 50, 55, 50, 50, 54, 50, 49, 53, 50, 49, 52, +50, 48, 51, 49, 48, 50, 49, 48, 50, 49, 48, 50, 49, 48, 50, 50, 48, 50, 50, 48, 51, 49, 48, 51, 50, 49, 52, 50, 49, 53, 50, 50, 54, 51, 51, 56, 51, 52, 57, 52, 53, 59, 52, +54, 61, 52, 55, 62, 52, 55, 64, 53, 56, 64, 53, 56, 65, 53, 57, 65, 54, 57, 65, 54, 58, 65, 56, 59, 65, 58, 60, 65, 56, 59, 65, 55, 58, 65, 54, 57, 65, 53, 56, 64, 53, 56, +64, 52, 55, 63, 52, 54, 62, 52, 53, 61, 51, 53, 59, 51, 52, 58, 50, 51, 56, 50, 50, 54, 50, 49, 53, 49, 48, 
51, 49, 48, 50, 49, 47, 49, 49, 47, 49, 48, 47, 48, 48, 46, 48, +48, 46, 48, 48, 46, 48, 48, 47, 48, 48, 47, 48, 48, 47, 49, 49, 47, 50, 49, 48, 51, 50, 49, 53, 50, 50, 54, 51, 51, 56, 51, 52, 58, 51, 53, 60, 52, 54, 61, 52, 54, 62, 52, +55, 64, 53, 56, 64, 53, 56, 65, 53, 56, 65, 54, 57, 64, 56, 58, 65, 57, 59, 65, 55, 58, 64, 55, 58, 65, 53, 56, 64, 53, 56, 64, 52, 55, 63, 52, 54, 62, 51, 53, 61, 51, 53, +59, 51, 52, 58, 50, 51, 56, 50, 50, 55, 50, 49, 53, 49, 48, 52, 49, 48, 50, 48, 47, 49, 48, 46, 48, 48, 46, 47, 47, 45, 46, 47, 45, 46, 47, 45, 46, 47, 45, 46, 47, 45, 46, +48, 46, 47, 48, 46, 48, 48, 47, 49, 49, 48, 50, 49, 49, 52, 50, 49, 53, 50, 50, 55, 50, 51, 56, 51, 52, 58, 51, 53, 60, 51, 53, 61, 52, 54, 62, 52, 55, 63, 53, 56, 64, 53, +56, 64, 54, 57, 64, 55, 58, 64, 57, 59, 65, 55, 58, 64, 54, 57, 64, 53, 56, 63, 52, 55, 63, 52, 54, 62, 51, 53, 61, 51, 53, 60, 50, 52, 58, 50, 51, 57, 50, 50, 56, 49, 50, +54, 49, 49, 53, 49, 49, 52, 49, 48, 50, 48, 47, 49, 47, 46, 47, 47, 45, 46, 46, 44, 44, 46, 44, 44, 46, 44, 44, 46, 44, 44, 47, 44, 45, 47, 45, 46, 48, 46, 48, 48, 47, 49, +49, 48, 51, 49, 49, 52, 49, 49, 53, 49, 49, 54, 50, 50, 55, 50, 51, 57, 50, 51, 58, 50, 52, 59, 51, 53, 61, 51, 54, 62, 52, 55, 63, 53, 56, 64, 53, 56, 64, 55, 58, 64, 56, +59, 64, 55, 57, 64, 54, 56, 64, 52, 55, 63, 52, 54, 62, 51, 54, 61, 50, 53, 60, 50, 52, 59, 50, 51, 57, 49, 51, 56, 49, 50, 55, 49, 50, 54, 49, 50, 54, 49, 49, 53, 49, 48, +51, 48, 47, 49, 48, 46, 47, 47, 44, 45, 46, 43, 44, 46, 43, 43, 46, 43, 43, 46, 43, 44, 47, 44, 45, 48, 45, 47, 48, 47, 49, 49, 48, 51, 49, 49, 52, 49, 49, 53, 49, 49, 54, +49, 49, 54, 49, 50, 55, 49, 50, 56, 49, 51, 57, 50, 51, 58, 50, 52, 60, 51, 54, 61, 52, 54, 63, 52, 55, 63, 53, 56, 64, 55, 58, 64, 56, 59, 64, 55, 57, 64, 53, 56, 63, 52, +55, 62, 51, 54, 61, 51, 53, 60, 50, 52, 59, 49, 51, 58, 49, 50, 57, 49, 50, 56, 49, 50, 55, 49, 50, 55, 49, 50, 55, 50, 50, 54, 50, 49, 53, 49, 48, 51, 48, 47, 49, 47, 45, +47, 46, 44, 45, 46, 43, 43, 46, 
43, 43, 46, 43, 44, 47, 45, 46, 48, 46, 48, 49, 48, 50, 50, 49, 52, 50, 49, 54, 50, 50, 55, 50, 50, 55, 49, 50, 55, 49, 50, 55, 49, 50, 56, +49, 50, 56, 49, 51, 57, 50, 51, 58, 50, 53, 60, 51, 54, 62, 52, 55, 63, 53, 56, 64, 55, 57, 64, 56, 58, 64, 55, 57, 64, 53, 56, 63, 52, 54, 62, 51, 53, 60, 50, 52, 59, 49, +51, 58, 49, 50, 57, 48, 50, 56, 48, 49, 56, 49, 50, 56, 49, 50, 56, 49, 50, 56, 50, 50, 56, 50, 50, 55, 49, 49, 53, 49, 48, 51, 48, 46, 49, 47, 45, 46, 46, 43, 44, 45, 43, +44, 46, 44, 45, 47, 45, 47, 48, 47, 49, 49, 48, 52, 50, 50, 54, 50, 50, 55, 50, 51, 56, 50, 50, 56, 49, 50, 56, 48, 49, 56, 48, 49, 56, 48, 49, 56, 48, 50, 56, 49, 50, 57, +50, 52, 59, 51, 53, 61, 52, 55, 62, 53, 56, 63, 55, 57, 64, 56, 58, 64, 54, 57, 63, 53, 56, 63, 52, 54, 61, 51, 53, 60, 50, 51, 58, 49, 50, 57, 48, 50, 56, 48, 49, 56, 48, +50, 56, 48, 50, 56, 48, 50, 56, 49, 50, 56, 49, 50, 56, 49, 49, 55, 49, 49, 54, 48, 48, 52, 48, 47, 50, 47, 45, 47, 46, 44, 45, 45, 43, 44, 46, 44, 45, 47, 45, 48, 48, 47, +50, 49, 48, 53, 49, 49, 54, 49, 50, 56, 49, 50, 56, 49, 50, 57, 48, 50, 57, 48, 49, 56, 48, 49, 56, 47, 49, 55, 47, 49, 56, 48, 50, 57, 49, 51, 58, 50, 53, 60, 52, 54, 62, +53, 56, 63, 55, 57, 64, 56, 58, 64, 55, 57, 63, 53, 56, 63, 52, 54, 61, 50, 52, 59, 49, 51, 57, 48, 50, 56, 48, 49, 55, 47, 49, 55, 47, 49, 56, 47, 49, 56, 47, 49, 56, 47, +49, 56, 48, 49, 56, 48, 48, 55, 47, 48, 53, 47, 47, 52, 47, 47, 51, 47, 45, 48, 46, 44, 46, 45, 43, 45, 46, 44, 46, 47, 46, 48, 48, 47, 51, 48, 47, 52, 48, 48, 53, 47, 48, +54, 47, 48, 55, 47, 49, 56, 47, 49, 56, 47, 48, 55, 47, 48, 55, 47, 48, 55, 47, 48, 55, 47, 49, 56, 48, 51, 58, 50, 52, 60, 51, 54, 61, 53, 56, 63, 55, 57, 64, 56, 58, 64, +55, 57, 63, 53, 56, 62, 52, 54, 61, 51, 52, 59, 49, 51, 57, 48, 49, 56, 47, 48, 55, 46, 48, 54, 46, 48, 54, 46, 48, 55, 46, 48, 55, 46, 47, 55, 46, 47, 54, 46, 47, 53, 46, +46, 52, 46, 46, 52, 46, 46, 50, 46, 45, 48, 46, 44, 46, 45, 44, 45, 46, 44, 46, 47, 46, 49, 47, 46, 50, 46, 46, 51, 46, 46, 52, 
46, 46, 53, 46, 47, 53, 46, 47, 54, 46, 47, +54, 46, 47, 54, 45, 47, 54, 46, 47, 54, 46, 48, 54, 47, 48, 55, 48, 50, 57, 50, 52, 59, 51, 54, 61, 53, 55, 63, 55, 57, 64, 56, 58, 64, 55, 57, 63, 54, 56, 62, 52, 54, 61, +51, 52, 59, 49, 50, 57, 48, 49, 56, 47, 48, 55, 46, 47, 54, 46, 47, 54, 45, 46, 53, 46, 47, 54, 46, 47, 54, 46, 46, 53, 46, 46, 52, 45, 45, 51, 45, 45, 51, 46, 46, 50, 46, +46, 48, 46, 45, 46, 45, 44, 45, 46, 45, 46, 47, 46, 48, 47, 46, 50, 46, 46, 51, 45, 46, 52, 46, 46, 52, 46, 46, 53, 46, 46, 54, 46, 47, 54, 45, 47, 54, 45, 46, 54, 46, 47, +54, 46, 48, 54, 47, 49, 55, 48, 50, 57, 50, 52, 59, 52, 54, 62, 53, 55, 63, 55, 57, 64, 56, 58, 64, 55, 57, 64, 54, 55, 62, 52, 54, 61, 51, 52, 59, 49, 51, 57, 49, 50, 56, +48, 49, 55, 47, 48, 55, 46, 47, 54, 46, 47, 54, 46, 47, 54, 46, 47, 54, 46, 46, 53, 46, 46, 52, 45, 45, 51, 45, 45, 50, 46, 45, 49, 46, 45, 48, 46, 45, 46, 46, 44, 45, 46, +45, 46, 47, 46, 48, 47, 46, 49, 46, 46, 50, 45, 45, 51, 45, 46, 52, 46, 46, 54, 46, 47, 54, 45, 47, 54, 46, 47, 54, 46, 47, 54, 47, 48, 55, 47, 48, 55, 48, 49, 56, 49, 50, +57, 51, 52, 60, 52, 54, 62, 53, 55, 63, 55, 57, 64, 56, 58, 64, 55, 57, 63, 54, 55, 62, 53, 54, 61, 51, 52, 59, 50, 51, 57, 49, 50, 57, 48, 49, 56, 47, 48, 55, 47, 48, 54, +46, 47, 54, 46, 47, 54, 46, 47, 54, 46, 47, 54, 46, 46, 52, 46, 46, 51, 46, 45, 50, 46, 45, 49, 46, 45, 47, 46, 45, 46, 46, 44, 44, 46, 44, 45, 46, 45, 47, 46, 46, 48, 46, +46, 49, 45, 45, 51, 46, 46, 52, 46, 47, 54, 46, 47, 54, 46, 47, 55, 46, 47, 55, 47, 48, 55, 47, 48, 55, 48, 49, 55, 49, 50, 56, 49, 51, 58, 51, 53, 60, 52, 54, 62, 53, 55, +63, 55, 57, 64, 56, 58, 64, 55, 57, 63, 54, 56, 63, 53, 54, 61, 51, 52, 59, 50, 51, 58, 49, 50, 57, 49, 49, 55, 48, 49, 55, 48, 48, 54, 48, 48, 55, 47, 48, 55, 47, 48, 55, +47, 47, 54, 47, 47, 52, 46, 46, 50, 46, 45, 49, 46, 45, 48, 46, 44, 46, 46, 44, 44, 45, 43, 43, 46, 44, 43, 46, 44, 45, 46, 45, 47, 46, 46, 49, 46, 46, 50, 46, 47, 52, 47, +47, 54, 47, 48, 55, 47, 48, 55, 47, 48, 55, 47, 48, 
55, 48, 49, 55, 48, 49, 55, 49, 50, 56, 50, 51, 58, 51, 53, 60, 53, 54, 62, 54, 55, 63, 55, 56, 64, 56, 58, 64, 55, 57, +63, 54, 56, 63, 53, 54, 61, 51, 52, 59, 51, 51, 58, 50, 50, 56, 49, 49, 54, 48, 48, 54, 48, 48, 53, 47, 47, 53, 47, 47, 53, 47, 47, 52, 47, 47, 52, 46, 46, 50, 46, 45, 48, +45, 45, 47, 46, 44, 46, 45, 44, 44, 45, 43, 42, 45, 42, 41, 45, 43, 42, 46, 44, 43, 46, 44, 45, 46, 45, 47, 46, 45, 48, 46, 46, 50, 47, 47, 52, 47, 47, 53, 47, 48, 53, 48, +48, 53, 48, 48, 53, 48, 48, 54, 49, 49, 55, 49, 50, 56, 50, 51, 58, 52, 53, 60, 53, 54, 62, 54, 55, 63, 55, 56, 64, 57, 58, 64, 55, 57, 63, 54, 56, 63, 53, 54, 61, 52, 52, +59, 51, 51, 57, 50, 50, 55, 49, 48, 54, 48, 47, 52, 47, 47, 51, 47, 46, 50, 47, 46, 50, 46, 46, 50, 46, 46, 49, 46, 45, 48, 45, 44, 46, 45, 44, 45, 45, 43, 44, 45, 43, 42, +44, 42, 41, 44, 42, 40, 45, 42, 40, 45, 42, 41, 45, 43, 43, 45, 44, 44, 46, 44, 46, 46, 45, 47, 46, 45, 48, 47, 46, 49, 47, 46, 50, 47, 46, 50, 47, 46, 50, 48, 47, 52, 48, +48, 53, 49, 49, 55, 51, 51, 58, 52, 53, 60, 53, 54, 62, 54, 55, 63, 55, 56, 64, 57, 58, 65, 55, 57, 63, 54, 56, 62, 53, 54, 61, 52, 52, 59, 51, 51, 57, 50, 49, 55, 49, 48, +52, 47, 46, 50, 47, 45, 49, 46, 45, 48, 46, 45, 47, 45, 44, 46, 45, 44, 46, 45, 43, 45, 45, 43, 44, 45, 43, 43, 45, 43, 42, 44, 42, 41, 44, 42, 40, 44, 41, 39, 44, 41, 39, +44, 42, 40, 45, 43, 42, 45, 43, 43, 45, 43, 44, 45, 44, 45, 46, 44, 45, 46, 44, 46, 46, 44, 46, 46, 44, 47, 46, 45, 48, 47, 46, 49, 48, 47, 51, 49, 49, 54, 50, 51, 57, 52, +53, 60, 53, 54, 62, 54, 55, 63, 55, 56, 64, 57, 58, 65, 55, 57, 63, 54, 56, 62, 53, 54, 61, 52, 52, 59, 50, 51, 57, 49, 49, 54, 48, 47, 51, 47, 45, 48, 46, 44, 46, 45, 43, +45, 45, 43, 44, 45, 43, 43, 44, 42, 43, 45, 43, 42, 44, 42, 42, 45, 43, 42, 45, 43, 42, 45, 42, 41, 44, 42, 40, 44, 41, 39, 44, 42, 40, 44, 42, 41, 45, 43, 41, 45, 43, 42, +45, 43, 43, 45, 43, 43, 45, 43, 43, 45, 43, 43, 45, 43, 43, 45, 43, 44, 46, 44, 45, 47, 45, 47, 48, 47, 50, 49, 48, 53, 50, 50, 57, 52, 52, 60, 53, 
54, 62, 54, 55, 63, 55, +56, 64, 57, 58, 65, 56, 57, 64, 55, 56, 63, 53, 54, 61, 52, 52, 59, 51, 51, 57, 49, 49, 54, 48, 47, 51, 46, 45, 47, 45, 43, 44, 45, 42, 43, 44, 42, 42, 44, 42, 41, 44, 42, +41, 45, 42, 42, 45, 43, 42, 45, 43, 43, 45, 44, 43, 45, 43, 42, 45, 43, 41, 44, 42, 41, 45, 43, 41, 45, 43, 42, 45, 43, 43, 45, 44, 43, 45, 44, 43, 45, 44, 43, 45, 43, 42, +45, 43, 42, 45, 42, 42, 45, 43, 43, 46, 43, 44, 46, 44, 46, 48, 46, 50, 49, 48, 53, 50, 50, 57, 52, 52, 60, 53, 54, 62, 54, 55, 63, 55, 56, 64, 58, 58, 65, 56, 57, 64, 55, +56, 63, 53, 54, 61, 52, 52, 59, 51, 51, 57, 49, 49, 54, 48, 47, 51, 46, 45, 47, 45, 43, 44, 45, 42, 42, 44, 42, 41, 44, 42, 41, 45, 42, 41, 45, 43, 42, 45, 44, 43, 46, 44, +44, 46, 45, 44, 46, 45, 44, 46, 45, 44, 46, 45, 44, 46, 45, 45, 46, 45, 45, 46, 44, 44, 46, 44, 44, 45, 44, 44, 46, 44, 44, 45, 44, 43, 45, 43, 42, 45, 42, 42, 45, 42, 42, +45, 43, 44, 46, 44, 46, 47, 46, 49, 49, 48, 53, 50, 50, 57, 51, 52, 60, 53, 54, 62, 54, 55, 63, 55, 56, 64, 58, 59, 65, 57, 57, 64, 55, 56, 63, 53, 54, 61, 52, 52, 60, 51, +51, 57, 49, 49, 54, 48, 47, 51, 47, 45, 48, 45, 43, 44, 45, 42, 42, 44, 42, 41, 45, 42, 42, 45, 43, 42, 45, 43, 43, 45, 44, 44, 46, 45, 45, 46, 45, 46, 47, 46, 47, 47, 46, +47, 47, 47, 48, 47, 46, 47, 47, 46, 47, 46, 45, 46, 46, 45, 45, 45, 44, 45, 45, 44, 44, 45, 44, 44, 45, 43, 43, 45, 43, 43, 45, 43, 43, 46, 43, 44, 46, 44, 47, 47, 46, 50, +49, 48, 54, 50, 50, 57, 51, 52, 60, 52, 53, 62, 54, 55, 63, 55, 56, 64, 59, 60, 66, 57, 58, 64, 56, 56, 63, 54, 54, 62, 52, 52, 60, 51, 51, 57, 49, 49, 55, 48, 47, 52, 47, +45, 48, 45, 43, 45, 45, 42, 43, 45, 42, 42, 44, 42, 42, 45, 43, 43, 45, 43, 44, 45, 44, 45, 46, 45, 46, 46, 46, 48, 47, 47, 49, 47, 47, 49, 48, 48, 50, 48, 47, 49, 47, 47, +49, 47, 46, 48, 46, 45, 47, 45, 44, 46, 45, 44, 45, 45, 44, 45, 45, 43, 44, 45, 43, 43, 45, 43, 44, 46, 43, 45, 46, 44, 47, 47, 46, 50, 48, 48, 54, 50, 50, 57, 51, 52, 60, +52, 54, 62, 54, 55, 63, 56, 57, 64, 60, 60, 66, 58, 58, 65, 56, 57, 64, 
54, 55, 62, 52, 53, 60, 51, 51, 58, 49, 49, 55, 48, 47, 52, 47, 45, 49, 45, 43, 46, 45, 42, 44, 44, +42, 43, 44, 42, 43, 44, 42, 44, 44, 43, 44, 45, 43, 45, 45, 44, 47, 46, 45, 48, 46, 46, 49, 47, 47, 50, 47, 47, 50, 47, 47, 50, 47, 46, 50, 46, 46, 49, 45, 45, 48, 45, 44, +46, 45, 44, 46, 45, 43, 45, 45, 43, 45, 45, 43, 44, 45, 43, 45, 46, 43, 46, 46, 44, 48, 47, 46, 51, 48, 48, 55, 50, 50, 58, 51, 52, 60, 53, 54, 62, 54, 55, 63, 56, 57, 65, +60, 61, 67, 58, 59, 65, 56, 57, 64, 55, 56, 63, 52, 53, 61, 51, 51, 58, 49, 49, 56, 48, 47, 53, 46, 45, 50, 45, 44, 47, 44, 43, 45, 44, 42, 44, 44, 43, 44, 44, 43, 44, 44, +43, 45, 44, 43, 45, 45, 44, 47, 45, 45, 48, 46, 45, 48, 46, 46, 49, 47, 46, 50, 46, 46, 50, 46, 45, 49, 45, 45, 48, 45, 45, 48, 45, 44, 47, 44, 44, 46, 44, 44, 46, 45, 43, +46, 45, 43, 46, 45, 43, 46, 45, 44, 47, 46, 44, 49, 47, 46, 52, 48, 48, 55, 49, 50, 58, 51, 52, 60, 53, 54, 62, 54, 56, 64, 56, 58, 65, 61, 62, 67, 59, 60, 66, 57, 58, 65, +55, 56, 63, 53, 54, 61, 51, 52, 59, 49, 49, 56, 48, 47, 53, 46, 45, 50, 45, 44, 48, 44, 43, 46, 44, 43, 45, 44, 43, 45, 44, 43, 46, 44, 43, 46, 44, 44, 46, 45, 44, 47, 45, +45, 48, 46, 45, 48, 46, 45, 49, 46, 46, 49, 46, 46, 49, 45, 45, 49, 45, 45, 48, 45, 45, 48, 45, 44, 47, 45, 44, 47, 45, 44, 47, 45, 44, 47, 45, 44, 47, 45, 43, 47, 45, 44, +47, 46, 45, 49, 47, 46, 52, 48, 48, 56, 50, 50, 58, 51, 52, 61, 53, 54, 63, 55, 56, 64, 57, 58, 65, 61, 62, 68, 60, 60, 67, 58, 59, 66, 56, 57, 64, 54, 55, 62, 52, 52, 60, +49, 50, 57, 48, 48, 54, 46, 46, 51, 45, 44, 49, 44, 43, 47, 44, 43, 46, 44, 44, 47, 44, 44, 47, 45, 44, 47, 45, 45, 47, 45, 45, 48, 45, 45, 48, 45, 45, 48, 46, 46, 49, 46, +46, 49, 46, 46, 49, 46, 46, 48, 45, 45, 48, 45, 45, 48, 45, 45, 48, 45, 45, 48, 45, 45, 48, 45, 45, 48, 45, 44, 48, 44, 44, 47, 45, 44, 48, 46, 45, 50, 47, 47, 54, 48, 49, +57, 50, 51, 59, 52, 53, 61, 54, 55, 63, 56, 57, 65, 57, 59, 66, 62, 63, 68, 60, 61, 67, 58, 59, 66, 57, 58, 65, 54, 56, 63, 52, 53, 61, 50, 51, 58, 48, 49, 55, 47, 47, 
52, +45, 45, 50, 44, 44, 48, 44, 44, 47, 44, 44, 47, 45, 45, 48, 46, 46, 48, 46, 46, 49, 46, 46, 49, 46, 47, 49, 47, 47, 49, 47, 47, 49, 47, 47, 49, 47, 47, 49, 47, 47, 49, 46, +47, 49, 46, 47, 50, 46, 47, 50, 46, 46, 49, 45, 46, 49, 45, 45, 49, 45, 44, 49, 45, 44, 49, 45, 44, 49, 46, 46, 52, 47, 48, 55, 49, 50, 58, 50, 52, 60, 52, 54, 62, 54, 56, +64, 56, 58, 65, 58, 59, 66, 62, 63, 68, 61, 62, 67, 59, 60, 66, 57, 58, 65, 55, 56, 64, 53, 54, 62, 51, 52, 59, 49, 50, 56, 47, 48, 54, 46, 46, 51, 45, 44, 49, 44, 44, 48, +44, 44, 48, 44, 45, 48, 45, 46, 49, 46, 46, 49, 46, 47, 49, 47, 47, 50, 47, 48, 50, 47, 48, 49, 47, 48, 49, 47, 48, 50, 47, 48, 50, 47, 48, 50, 46, 47, 50, 46, 47, 50, 45, +46, 50, 45, 46, 50, 44, 45, 49, 44, 45, 49, 44, 45, 49, 45, 45, 51, 46, 47, 53, 48, 49, 56, 49, 51, 59, 51, 53, 61, 53, 55, 63, 55, 57, 65, 57, 58, 66, 59, 60, 67, 63, 63, +68, 61, 62, 68, 60, 61, 67, 58, 59, 66, 56, 57, 64, 54, 55, 62, 52, 53, 60, 50, 51, 58, 48, 49, 55, 46, 47, 52, 45, 45, 50, 44, 44, 49, 44, 44, 48, 44, 44, 48, 45, 45, 49, +45, 46, 49, 46, 47, 50, 47, 48, 50, 47, 48, 50, 48, 48, 50, 48, 49, 50, 48, 49, 50, 48, 48, 50, 47, 48, 50, 46, 47, 50, 46, 46, 50, 45, 46, 50, 45, 45, 50, 44, 45, 50, 44, +45, 49, 45, 45, 50, 45, 46, 52, 47, 48, 54, 48, 50, 57, 50, 52, 60, 52, 53, 62, 54, 55, 64, 56, 57, 65, 58, 59, 66, 59, 61, 67, 63, 64, 69, 62, 63, 68, 61, 62, 68, 59, 60, +66, 57, 58, 65, 55, 56, 63, 53, 54, 61, 51, 52, 59, 49, 50, 56, 47, 48, 54, 46, 46, 51, 44, 44, 49, 44, 44, 49, 44, 44, 48, 44, 44, 48, 45, 45, 49, 46, 46, 49, 47, 47, 50, +47, 48, 50, 48, 48, 50, 48, 49, 50, 48, 48, 50, 48, 48, 50, 47, 47, 50, 46, 46, 49, 45, 46, 49, 45, 45, 50, 44, 45, 50, 44, 45, 50, 44, 44, 50, 45, 45, 51, 46, 47, 53, 48, +49, 56, 49, 51, 59, 51, 53, 61, 52, 54, 63, 55, 56, 64, 56, 58, 66, 58, 60, 66, 60, 61, 67, 64, 64, 69, 62, 63, 68, 61, 62, 68, 59, 60, 67, 57, 59, 66, 55, 57, 64, 53, 55, +62, 52, 53, 60, 50, 51, 58, 48, 49, 55, 46, 47, 53, 45, 45, 50, 43, 44, 48, 43, 43, 48, 43, 
43, 48, 44, 44, 48, 44, 45, 48, 45, 45, 48, 46, 46, 49, 46, 46, 49, 47, 47, 49, +47, 47, 49, 46, 46, 49, 46, 46, 48, 45, 45, 48, 44, 45, 48, 44, 44, 49, 44, 44, 49, 44, 44, 49, 44, 45, 50, 45, 46, 52, 47, 48, 55, 48, 50, 57, 50, 52, 60, 51, 53, 62, 53, +55, 64, 55, 57, 65, 57, 59, 66, 59, 60, 66, 60, 61, 67, 64, 65, 69, 63, 63, 68, 62, 63, 68, 60, 61, 67, 58, 60, 66, 56, 58, 65, 54, 56, 63, 52, 54, 62, 51, 52, 59, 49, 50, +57, 47, 48, 54, 45, 46, 51, 44, 44, 49, 43, 43, 48, 43, 43, 47, 43, 43, 47, 43, 43, 47, 44, 44, 47, 44, 44, 47, 45, 44, 47, 45, 44, 47, 45, 44, 47, 45, 44, 47, 44, 44, 47, +44, 44, 47, 43, 44, 47, 43, 44, 48, 43, 44, 48, 44, 44, 50, 45, 46, 52, 46, 47, 54, 48, 49, 56, 49, 51, 59, 50, 52, 61, 52, 54, 63, 54, 56, 64, 56, 57, 65, 58, 59, 66, 59, +60, 67, 61, 62, 67, 65, 65, 69, 63, 64, 69, 62, 63, 68, 60, 61, 67, 59, 60, 66, 57, 58, 65, 55, 56, 64, 53, 55, 63, 51, 53, 60, 50, 51, 58, 48, 49, 56, 46, 47, 53, 45, 45, +51, 44, 44, 48, 43, 43, 47, 43, 43, 46, 43, 43, 46, 43, 42, 45, 43, 43, 45, 43, 43, 46, 44, 43, 46, 44, 43, 46, 43, 43, 46, 43, 43, 46, 43, 43, 46, 43, 43, 46, 43, 43, 47, +44, 44, 49, 44, 45, 51, 46, 47, 53, 47, 48, 55, 48, 50, 58, 50, 52, 60, 51, 53, 62, 53, 55, 63, 54, 56, 64, 56, 58, 65, 58, 60, 67, 59, 60, 67, 61, 62, 67, 65, 66, 70, 64, +64, 69, 62, 63, 68, 60, 62, 67, 59, 60, 67, 57, 59, 65, 56, 57, 65, 54, 56, 63, 52, 54, 61, 51, 52, 60, 49, 50, 57, 47, 48, 55, 46, 46, 52, 45, 44, 50, 44, 43, 48, 43, 43, +46, 43, 42, 46, 43, 42, 45, 43, 43, 45, 43, 43, 45, 43, 43, 46, 44, 43, 46, 43, 43, 46, 43, 43, 46, 43, 43, 46, 43, 43, 47, 43, 43, 48, 44, 44, 50, 46, 46, 52, 47, 48, 55, +48, 50, 57, 49, 51, 59, 50, 53, 61, 52, 54, 62, 53, 55, 64, 55, 57, 65, 57, 58, 66, 59, 60, 67, 60, 61, 67, 62, 62, 67, 65, 66, 70, 64, 65, 69, 63, 64, 68, 61, 62, 67, 60, +61, 67, 58, 59, 66, 56, 58, 65, 55, 56, 64, 53, 55, 62, 52, 53, 61, 50, 52, 59, 49, 50, 57, 47, 48, 55, 46, 46, 52, 45, 44, 49, 44, 43, 48, 43, 43, 46, 43, 43, 46, 43, 43, +45, 43, 43, 45, 
44, 43, 46, 44, 43, 46, 44, 43, 46, 44, 43, 46, 43, 43, 46, 44, 43, 47, 44, 44, 49, 46, 46, 52, 47, 48, 54, 48, 49, 57, 49, 51, 59, 50, 52, 60, 51, 54, 62, +53, 55, 63, 54, 56, 64, 56, 58, 65, 58, 59, 66, 59, 60, 66, 61, 61, 67, 62, 63, 68, 66, 66, 70, 64, 65, 69, 63, 64, 69, 62, 63, 68, 60, 61, 67, 58, 59, 66, 57, 58, 65, 55, +57, 64, 54, 55, 63, 52, 54, 62, 51, 53, 60, 50, 51, 58, 48, 50, 57, 47, 48, 55, 46, 46, 52, 45, 45, 50, 44, 44, 48, 44, 43, 47, 44, 43, 46, 44, 43, 46, 44, 43, 46, 44, 43, +46, 44, 43, 46, 44, 43, 46, 44, 44, 47, 45, 45, 49, 46, 46, 52, 47, 48, 54, 48, 49, 56, 49, 51, 58, 50, 52, 60, 51, 53, 61, 52, 54, 63, 54, 55, 63, 55, 57, 64, 57, 58, 65, +59, 60, 66, 60, 61, 66, 61, 62, 67, 63, 63, 68, 66, 67, 71, 65, 66, 70, 64, 65, 69, 63, 64, 68, 61, 62, 67, 59, 60, 66, 57, 58, 65, 56, 58, 64, 55, 56, 63, 53, 55, 62, 52, +54, 61, 51, 52, 60, 49, 51, 58, 49, 50, 57, 47, 48, 55, 47, 47, 52, 46, 45, 50, 45, 44, 49, 45, 44, 48, 45, 44, 47, 44, 44, 47, 44, 44, 47, 45, 44, 47, 45, 44, 48, 45, 45, +50, 46, 46, 52, 47, 48, 54, 49, 49, 56, 49, 51, 58, 50, 52, 60, 51, 53, 61, 52, 54, 62, 53, 55, 63, 55, 56, 64, 56, 57, 64, 58, 59, 65, 60, 61, 66, 61, 62, 67, 63, 63, 68, +64, 64, 69, 67, 67, 71, 66, 66, 71, 65, 65, 70, 64, 64, 69, 62, 63, 68, 60, 61, 67, 58, 59, 65, 57, 58, 65, 56, 57, 64, 54, 56, 63, 53, 54, 62, 51, 53, 60, 50, 52, 60, 50, +51, 58, 49, 50, 57, 48, 48, 55, 47, 47, 53, 46, 46, 51, 46, 45, 50, 46, 45, 49, 46, 45, 49, 45, 45, 49, 46, 45, 50, 46, 46, 51, 47, 47, 53, 48, 48, 55, 49, 50, 57, 50, 51, +58, 51, 52, 60, 51, 53, 61, 52, 54, 62, 53, 55, 63, 54, 56, 63, 56, 57, 64, 57, 58, 65, 59, 59, 65, 60, 61, 66, 62, 63, 68, 63, 64, 68, 65, 65, 70, 68, 68, 72, 67, 67, 71, +66, 66, 71, 64, 65, 70, 63, 64, 69, 61, 62, 67, 59, 60, 66, 58, 59, 65, 57, 58, 64, 55, 56, 63, 54, 55, 62, 53, 54, 62, 51, 53, 61, 50, 52, 59, 50, 51, 58, 49, 50, 57, 48, +49, 56, 48, 48, 54, 47, 47, 53, 47, 47, 52, 47, 47, 52, 47, 46, 52, 47, 47, 53, 48, 48, 54, 48, 49, 56, 49, 50, 
57, 50, 51, 58, 51, 52, 60, 51, 53, 61, 52, 53, 61, 53, 54, +62, 54, 55, 63, 56, 57, 63, 57, 58, 64, 58, 59, 65, 60, 60, 66, 62, 62, 67, 63, 63, 68, 64, 65, 69, 66, 66, 71}; + +#endif diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_post_process.h b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_post_process.h new file mode 100644 index 0000000000000000000000000000000000000000..709dfec84548c32e038987bd852d037908e666d7 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_post_process.h @@ -0,0 +1,135 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018-2020, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * ============================================================================ + */ + +#ifndef FACE_POST_PROCESS_H_ +#define FACE_POST_PROCESS_H_ + +#include "face_recognition_params.h" + +#include +#include + +#include "atlasutil/atlas_thread.h" +#include "atlasutil/atlas_app.h" +#include "atlasutil/dvpp_process.h" + +#include "facial_recognition_message.pb.h" +#include "ascenddk/presenter/agent/presenter_channel.h" +#include "presenter_channels.h" + + +class FacePostProcess : public AtlasThread { +public: + /** + * @brief: constructor + * @param [in]: configFile: App config file + */ + FacePostProcess(const std::string& configFile); + + /** + * @brief: destruction function + */ + ~FacePostProcess(); + + /** + * @brief: Post process thread init function + * @param [in]: None + * @return: Init result + * ATLAS_OK: Init success + * ATLAS_ERROR: Init failed. 
The thread will exit + */ + AtlasError Init(); + + /** + * @brief: The message process entry of face detection thread received + * @param [in]: msgId: The received message id + * @param [in]: msgData: The received message data + * @return: Message process result + * ATLAS_OK: process success + * ATLAS_ERROR: process failed, the thread will exit + */ + AtlasError Process(int msgId, std::shared_ptr msgData); + +private: + /** + * @brief: Send face feature in camera frame to presenter server + * @param [in]: recogInfo:face feature information and origin image + * @return: ATLAS_OK: send success + * ATLAS_ERROR: send failed + */ + AtlasError SendFeature(std::shared_ptr recogInfo); + + /** + * @brief: Send face feature in face register image to presenter server + * @param [in]: recogInfo:face feature information and origin image + * @return: ATLAS_OK: send success + * ATLAS_ERROR: send failed + */ + AtlasError ReplyFeature(std::shared_ptr recogInfo); + + /** + * @brief: Process MSG_FACE_RECOGNIZE_DATA message + * @param [in]: recogInfo: message data + * @return: Message process result + * ATLAS_OK: process success + * ATLAS_ERROR: process failed, the thread will exit + */ + AtlasError FaceRecognitionMsgProcess( + std::shared_ptr recogInfo); + + /** + * @brief: Fill face feature data to the message that send to + * presenter server + * @param [out]: message instance that send to presenter server + * @param [in]: faceImg: message data + */ + void PrepareFaceBoxData( + ascend::presenter::facial_recognition::FaceFeature* feature, + FaceImage& faceImg); + + /** + * @brief: Send message to presenter server + * @param [in]: recogInfo: message data + * @return: Message process result + * ATLAS_OK: process success + * ATLAS_ERROR: process failed, the thread will exit + */ + AtlasError SendMessage(google::protobuf::Message& message); + +private: + std::string configFile_; + DvppProcess dvpp_; + }; + + +#endif diff --git 
a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_recognition.h b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_recognition.h new file mode 100644 index 0000000000000000000000000000000000000000..8744aeedd95ac783e5081e2feee9180551c0bae9 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_recognition.h @@ -0,0 +1,216 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018-2020, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * ============================================================================ + */ + +#ifndef FACE_RECOGNITION_ENGINE_H_ +#define FACE_RECOGNITION_ENGINE_H_ + +#include +#include "atlasutil/atlas_model.h" +#include "atlasutil/dvpp_process.h" +#include "face_recognition_params.h" +#include "facial_thread_base.h" + +// aligned face data +struct AlignedFace { +// face index (using for set result) + int32_t faceIndex; +// aligned face + cv::Mat alignedFace; +// flip face according to aligned face + cv::Mat alignedFlipFace; +}; + +/** + * @brief: inference engine class + */ +class FaceRecognition : public FacialThreadBase { +public: + /** + * @brief: constructor + * @param [in]: configFile: App config file + */ + FaceRecognition(const std::string& configFile); + + /** + * @brief: destruction function + */ + ~FaceRecognition(); + + /** + * @brief: Face recognition thread init function + * @param [in]: None + * @return: Init result + * ATLAS_OK: Init success + * ATLAS_ERROR: Init failed. 
The thread will exit + */ + AtlasError Init(); + + /** + * @brief: The message process entry of face recognition thread received + * @param [in]: msgId: The received message id + * @param [in]: msgData: The received message data + * @return: Message process result + * ATLAS_OK: process success + * ATLAS_ERROR: process failed, the thread will exit + */ + AtlasError Process(int msgId, shared_ptr msgData); + +private: + /** + * @brief: Create face image affine templete + * @return: None + */ + void CreateFaceAffineDestTemplate(); + + /** + * @brief: Preprocess face images, include resize, flip, affine and align + * @param [out]: alignedImgs: preprocessed image data + * @param [out]: faceImages: face image and feature mask data + */ + void PreProcess(std::vector &alignedImgs, + std::vector &faceImages); + + /** + * @brief Check transformation matrix for openCV wapAffine + * @param [in]: mat: transformation matrix + * @return true: match + * false: not match + */ + bool CheckTransfromMat(cv::Mat &mat); + + /** + * @brief: Affine, aligned and flip face + * @param [out]: alignedImgs: result image data + * @param [in]: faceImg: face data, include cropped face image and feature + * @param [in]: resizedImage: resized face image + * @param [in]: index: current process face index in face image vector + * @return: ATLAS_OK: process success + * ATLAS_ERROR: process failed + */ + AtlasError AlignedAndFlipFace(std::vector &alignedImgs, + FaceImage &faceImg, + ImageData &resizedImage, + int32_t index); + + /** + * @brief: Affine face image + * @param [out]: affinedImg: result image data + * @param [in]: resizedImage: resized face image + * @param [in]: faceImg: face data, include cropped face image and feature + * @return: ATLAS_OK: process success + * ATLAS_ERROR: process failed + */ + AtlasError FaceFeatureMaskAffine(cv::Mat& affinedImg, + ImageData &resizedImage, + FaceImage &faceImg); + + /** + * @brief: Estimate affine face image + * @param [in]: affineMat: affine matrix + * 
@param [in]: faceImg: face data, include cropped face image and feature + * @return: ATLAS_OK: process success + * ATLAS_ERROR: process failed + */ + AtlasError FaceFeatureMaskEstimateAffine(cv::Mat& affineMat, + FaceImage &faceImg); + + /** + * @brief: Resize face feature mask data + * @param [in]: destPoints: data after resize + * @param [in]: faceImg: face data, include cropped face image and feature + * @return: ATLAS_OK: process success + * ATLAS_ERROR: process failed + */ + AtlasError FaceFeatureMaskResize(std::vector& destPoints, + FaceImage &faceImg); + + /** + * @brief: Copy one batch data to acl device memory + * @param [out]: buffer: acl device memory + * @param [in]: bufferSize: acl device memory soze + * @param [in]: eachImgSize: each aligned image data size + * @param [in]: alignedImgs: aligned image vector + * @param [in]: startIdx: current batch start index in image vector + * @return: ATLAS_OK: process success + * ATLAS_ERROR: process failed + */ + AtlasError CopyOneBatchImage(uint8_t* dataBuffer, + uint32_t bufferSize, + uint32_t eachImgSize, + std::vector &alignedImgs, + int32_t startIdx); + + /** + * @brief: Process mode inference result + * @param [in/out]: faceImgs: face images information + * @param [in]: inferenceOutput: inference output data for each batch + * @param [in]: alignedImgs: aligned face and flip images + * @param [out]: startIdx: current batch start index in faceImgs + * @return: ATLAS_OK: process success + * ATLAS_ERROR: process failed + */ + AtlasError PostProcess(std::vector &faceImgs, + std::vector& inferenceOutput, + std::vector &alignedImgs, + int32_t startIdx); + + /** + * @brief: Inference preprocessed face images + * @param [out]: faceImages: faces information + * @param [in]: alignedImgs: preprocessed face image data + * @return: ATLAS_OK: inference success + * ATLAS_ERROR: inference failed + */ + AtlasError Inference(std::vector &faceImages, + std::vector &alignedImgs); + + /** + * @brief: Process message 
MSG_FACE_FEATURE_MASK + * @param [in]: recogInfo: message data + * @return: None + */ + void FaceFeatureMaskMsgProcess( + std::shared_ptr recogInfo); + +private: + AtlasModel model_; + DvppProcess dvpp_; + vector faceAffineDest_; + + uint32_t imgSize_; + uint8_t* inputBuf_; + uint32_t inputSize_; + uint8_t* inputPad_; +}; + +#endif /* FACE_RECOGNITION_ENGINE_H_ */ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_recognition_params.h b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_recognition_params.h new file mode 100644 index 0000000000000000000000000000000000000000..2f03c7392a8d11867d8084b12b230e5411f96d93 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_recognition_params.h @@ -0,0 +1,150 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018-2020, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * ============================================================================ + */ + +#ifndef FACE_RECOGNITION_PARAMS_H_ +#define FACE_RECOGNITION_PARAMS_H_ + +#include +#include +#include + +#include "face_feature_train_mean.h" +#include "face_feature_train_std.h" +#include "atlasutil/atlas_utils.h" +#include "opencv2/opencv.hpp" + +#include "opencv2/opencv.hpp" +#include "opencv2/imgproc.hpp" +#include "opencv2/core/types_c.h" +#include "opencv2/imgproc/types_c.h" +#include + + +using namespace std; +#define CHECK_MEM_OPERATOR_RESULTS(ret) \ +if (ret != SUCCESS) { \ + ATLAS_LOG_ERROR("memory operation failed, error=%d", ret); \ + return ATLAS_ERROR; \ +} + +#define MSG_READ_FRAME 1 +#define MSG_FRAME_DATA 2 +#define MSG_FACE_DETECT_DATA 3 +#define MSG_FACE_FEATURE_MASK 4 +#define MSG_FACE_RECOGNIZE_DATA 5 +#define MSG_FACE_REGISTER_DAEMON 6 +#define MSG_FACE_REG_IMAGE 7 +#define MSG_FACE_REG_INVALID 8 + +const string kCameraThreadName = "mind_camera"; +const string kDetectThreadName = "face_detect"; +const string kFeatureMaskThreadName = "face_feature_mask"; +const string kRecognitionThreadName = "face_recognition"; +const string kPostProcessThreadName = 
"face_post_process"; +const string kRegisterThreadName = "face_register"; + +/** + * @brief: frame information + */ +struct FrameInfo { + uint32_t imageSource = 0; // 0:Camera 1:Register + std::string faceId = ""; // registered face id + ImageData image; +}; + +/** + * @brief: face recognition APP error code definition + */ +enum class AppErrorCode { + // Success, no error + kNone = 0, + + // register engine failed + kRegister, + + // detection engine failed + kDetection, + + // feature mask engine failed + kFeatureMask, + + // recognition engine failed + kRecognition +}; + +struct ErrorInfo { + AppErrorCode errCode = AppErrorCode::kNone; + std::string errMsg = ""; +}; + +/** + * @brief: face rectangle + */ +struct FaceRectangle { + cv::Point lt; // left top + cv::Point rb; // right bottom +}; + + +/** + * @brief: face feature + */ +struct FaceFeature { + cv::Point leftEye; // left eye + cv::Point rightEye; // right eye + cv::Point nose; // nose + cv::Point leftMouth; // left mouth + cv::Point rightMouth; // right mouth +}; + +/** + * @brief: face image + */ +struct FaceImage { + ImageData image; // cropped image from original image + FaceRectangle rectangle; // face rectangle + FaceFeature featureMask; // face feature mask + std::vector featureVector; // face feature vector +}; + +/** + * @brief: information for face recognition + */ +struct FaceRecognitionInfo { + ErrorInfo errInfo; + FrameInfo frame; // frame information + ImageData orgImg; // original image + std::vector faceImgs; // cropped image +}; + +#endif /* FACE_RECOGNITION_PARAMS_H_ */ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_register.h b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_register.h new file mode 100644 index 0000000000000000000000000000000000000000..953a70ca274a1943d596d486a5e03d407d853b9c --- /dev/null +++ 
b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/face_register.h @@ -0,0 +1,133 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018-2020, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#ifndef FACE_REGISTER_ENGINE_H_ +#define FACE_REGISTER_ENGINE_H_ +#include +#include +#include +#include +#include +#include +#include + +#include "atlasutil/dvpp_process.h" +#include "face_recognition_params.h" +#include "facial_thread_base.h" +#include "presenter_channels.h" + + +class FaceRegister : public FacialThreadBase { +public: + /** + * @brief: constructor + * @param [in] configFile: App config file + */ + FaceRegister(const std::string& configFile); + + /** + * @brief: destruction function + */ + ~FaceRegister(); + + /** + * @brief: Face register thread init function + * @param [in]: None + * @return: Init result + * ATLAS_OK: Init success + * ATLAS_ERROR: Init failed. The thread will exit + */ + AtlasError Init(); + + /** + * @brief: The message process entry of face register thread received + * @param [in]: msgId: The received message id + * @param [in]: msgData: The received message data + * @return: Message process result + * ATLAS_OK: process success + * ATLAS_ERROR: process failed, the thread will exit + */ + AtlasError Process(int msgId, std::shared_ptr msgData); + +private: + /** + * @brief: Receive face register request from presenter server + * @param [in]: agentChannel: the connection between persenter + * server and app + * @return: face register request data + */ + ascend::presenter::facial_recognition::FaceInfo* ReceiveFaceRegisterRequest( + ascend::presenter::Channel* agentChannel); + + /** + * @brief: Copy the face register image to acl device, + * and convert to YUV420SP + * @param [out]: recogInfo: data with yuv image + * @param [in]: faceRegReq: face register request + * @return: ATLAS_OK: process success + * ATLAS_ERROR: process failed + */ + AtlasError ProcessFaceImage(std::shared_ptr recogInfo, + ascend::presenter::facial_recognition::FaceInfo* faceRegReq); + + /** + * @brief: Notify face register failed.Send message to post process thread + * 
first, then post process will send message to presenter server + * @param [in]: recogInfo: notify info + * @param [in]: errMsg: the reason of failed + * @return: ATLAS_OK: process success + * ATLAS_ERROR: process failed + */ + AtlasError SendErrorReply(std::shared_ptr recogInfo, + const std::string& errMsg); + + /** + * @brief: Daemon face register request from presenter server and process it + * @return: ATLAS_OK: process success + * ATLAS_ERROR: process failed + */ + AtlasError FaceRegisterDaemon(); + + /** + * @brief: Create message connection between presenter server and app + * @return: ATLAS_OK: create success + * ATLAS_ERROR: create failed + */ + AtlasError OpenPresenterChannel(); + +private: + DvppProcess dvpp_; + std::string configFile_; +}; + +#endif diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/facial_recognition_message.pb.h b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/facial_recognition_message.pb.h new file mode 100644 index 0000000000000000000000000000000000000000..bef2738a20255e2ba67a6cd36c29c4bd5eac0b53 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/facial_recognition_message.pb.h @@ -0,0 +1,1834 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: facial_recognition_message.proto + +#ifndef GOOGLE_PROTOBUF_INCLUDED_facial_5frecognition_5fmessage_2eproto +#define GOOGLE_PROTOBUF_INCLUDED_facial_5frecognition_5fmessage_2eproto + +#include +#include + +#include +#if PROTOBUF_VERSION < 3008000 +#error This file was generated by a newer version of protoc which is +#error incompatible with your Protocol Buffer headers. Please update +#error your headers. +#endif +#if 3008000 < PROTOBUF_MIN_PROTOC_VERSION +#error This file was generated by an older version of protoc which is +#error incompatible with your Protocol Buffer headers. 
Please +#error regenerate this file with a newer version of protoc. +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include // IWYU pragma: export +#include // IWYU pragma: export +#include +#include +// @@protoc_insertion_point(includes) +#include +#define PROTOBUF_INTERNAL_EXPORT_facial_5frecognition_5fmessage_2eproto +PROTOBUF_NAMESPACE_OPEN +namespace internal { +class AnyMetadata; +} // namespace internal +PROTOBUF_NAMESPACE_CLOSE + +// Internal implementation detail -- do not use these members. +struct TableStruct_facial_5frecognition_5fmessage_2eproto { + static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTableField entries[] + PROTOBUF_SECTION_VARIABLE(protodesc_cold); + static const ::PROTOBUF_NAMESPACE_ID::internal::AuxillaryParseTableField aux[] + PROTOBUF_SECTION_VARIABLE(protodesc_cold); + static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTable schema[7] + PROTOBUF_SECTION_VARIABLE(protodesc_cold); + static const ::PROTOBUF_NAMESPACE_ID::internal::FieldMetadata field_metadata[]; + static const ::PROTOBUF_NAMESPACE_ID::internal::SerializationTable serialization_table[]; + static const ::PROTOBUF_NAMESPACE_ID::uint32 offsets[]; +}; +extern const ::PROTOBUF_NAMESPACE_ID::internal::DescriptorTable descriptor_table_facial_5frecognition_5fmessage_2eproto; +namespace ascend { +namespace presenter { +namespace facial_recognition { +class Box; +class BoxDefaultTypeInternal; +extern BoxDefaultTypeInternal _Box_default_instance_; +class CommonResponse; +class CommonResponseDefaultTypeInternal; +extern CommonResponseDefaultTypeInternal _CommonResponse_default_instance_; +class FaceFeature; +class FaceFeatureDefaultTypeInternal; +extern FaceFeatureDefaultTypeInternal _FaceFeature_default_instance_; +class FaceInfo; +class FaceInfoDefaultTypeInternal; +extern FaceInfoDefaultTypeInternal _FaceInfo_default_instance_; +class FaceResult; +class FaceResultDefaultTypeInternal; +extern 
FaceResultDefaultTypeInternal _FaceResult_default_instance_; +class FrameInfo; +class FrameInfoDefaultTypeInternal; +extern FrameInfoDefaultTypeInternal _FrameInfo_default_instance_; +class RegisterApp; +class RegisterAppDefaultTypeInternal; +extern RegisterAppDefaultTypeInternal _RegisterApp_default_instance_; +} // namespace facial_recognition +} // namespace presenter +} // namespace ascend +PROTOBUF_NAMESPACE_OPEN +template<> ::ascend::presenter::facial_recognition::Box* Arena::CreateMaybeMessage<::ascend::presenter::facial_recognition::Box>(Arena*); +template<> ::ascend::presenter::facial_recognition::CommonResponse* Arena::CreateMaybeMessage<::ascend::presenter::facial_recognition::CommonResponse>(Arena*); +template<> ::ascend::presenter::facial_recognition::FaceFeature* Arena::CreateMaybeMessage<::ascend::presenter::facial_recognition::FaceFeature>(Arena*); +template<> ::ascend::presenter::facial_recognition::FaceInfo* Arena::CreateMaybeMessage<::ascend::presenter::facial_recognition::FaceInfo>(Arena*); +template<> ::ascend::presenter::facial_recognition::FaceResult* Arena::CreateMaybeMessage<::ascend::presenter::facial_recognition::FaceResult>(Arena*); +template<> ::ascend::presenter::facial_recognition::FrameInfo* Arena::CreateMaybeMessage<::ascend::presenter::facial_recognition::FrameInfo>(Arena*); +template<> ::ascend::presenter::facial_recognition::RegisterApp* Arena::CreateMaybeMessage<::ascend::presenter::facial_recognition::RegisterApp>(Arena*); +PROTOBUF_NAMESPACE_CLOSE +namespace ascend { +namespace presenter { +namespace facial_recognition { + +enum ErrorCode : int { + kErrorNone = 0, + kErrorAppRegisterExist = 1, + kErrorAppRegisterType = 2, + kErrorAppRegisterLimit = 3, + kErrorOther = 5, + ErrorCode_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::min(), + ErrorCode_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::max() +}; +bool ErrorCode_IsValid(int value); +constexpr 
ErrorCode ErrorCode_MIN = kErrorNone; +constexpr ErrorCode ErrorCode_MAX = kErrorOther; +constexpr int ErrorCode_ARRAYSIZE = ErrorCode_MAX + 1; + +const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor* ErrorCode_descriptor(); +template +inline const std::string& ErrorCode_Name(T enum_t_value) { + static_assert(::std::is_same::value || + ::std::is_integral::value, + "Incorrect type passed to function ErrorCode_Name."); + return ::PROTOBUF_NAMESPACE_ID::internal::NameOfEnum( + ErrorCode_descriptor(), enum_t_value); +} +inline bool ErrorCode_Parse( + const std::string& name, ErrorCode* value) { + return ::PROTOBUF_NAMESPACE_ID::internal::ParseNamedEnum( + ErrorCode_descriptor(), name, value); +} +// =================================================================== + +class CommonResponse : + public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:ascend.presenter.facial_recognition.CommonResponse) */ { + public: + CommonResponse(); + virtual ~CommonResponse(); + + CommonResponse(const CommonResponse& from); + CommonResponse(CommonResponse&& from) noexcept + : CommonResponse() { + *this = ::std::move(from); + } + + inline CommonResponse& operator=(const CommonResponse& from) { + CopyFrom(from); + return *this; + } + inline CommonResponse& operator=(CommonResponse&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() { + return GetDescriptor(); + } + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() { + return GetMetadataStatic().descriptor; + } + static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() { + return GetMetadataStatic().reflection; + } + static const CommonResponse& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const CommonResponse* internal_default_instance() { + return 
reinterpret_cast( + &_CommonResponse_default_instance_); + } + static constexpr int kIndexInFileMessages = + 0; + + void Swap(CommonResponse* other); + friend void swap(CommonResponse& a, CommonResponse& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline CommonResponse* New() const final { + return CreateMaybeMessage(nullptr); + } + + CommonResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void CopyFrom(const CommonResponse& from); + void MergeFrom(const CommonResponse& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; + #else + bool MergePartialFromCodedStream( + ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; + ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + inline void SharedCtor(); + inline void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(CommonResponse* other); + friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; + static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { + return "ascend.presenter.facial_recognition.CommonResponse"; + } + private: + inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } 
+ public: + + ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final; + private: + static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() { + ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_facial_5frecognition_5fmessage_2eproto); + return ::descriptor_table_facial_5frecognition_5fmessage_2eproto.file_level_metadata[kIndexInFileMessages]; + } + + public: + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // string message = 2; + void clear_message(); + static const int kMessageFieldNumber = 2; + const std::string& message() const; + void set_message(const std::string& value); + void set_message(std::string&& value); + void set_message(const char* value); + void set_message(const char* value, size_t size); + std::string* mutable_message(); + std::string* release_message(); + void set_allocated_message(std::string* message); + + // .ascend.presenter.facial_recognition.ErrorCode ret = 1; + void clear_ret(); + static const int kRetFieldNumber = 1; + ::ascend::presenter::facial_recognition::ErrorCode ret() const; + void set_ret(::ascend::presenter::facial_recognition::ErrorCode value); + + // @@protoc_insertion_point(class_scope:ascend.presenter.facial_recognition.CommonResponse) + private: + class HasBitSetters; + + ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_; + ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr message_; + int ret_; + mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; + friend struct ::TableStruct_facial_5frecognition_5fmessage_2eproto; +}; +// ------------------------------------------------------------------- + +class RegisterApp : + public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:ascend.presenter.facial_recognition.RegisterApp) */ { + public: + RegisterApp(); + virtual ~RegisterApp(); + + RegisterApp(const RegisterApp& from); 
+ RegisterApp(RegisterApp&& from) noexcept + : RegisterApp() { + *this = ::std::move(from); + } + + inline RegisterApp& operator=(const RegisterApp& from) { + CopyFrom(from); + return *this; + } + inline RegisterApp& operator=(RegisterApp&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() { + return GetDescriptor(); + } + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() { + return GetMetadataStatic().descriptor; + } + static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() { + return GetMetadataStatic().reflection; + } + static const RegisterApp& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const RegisterApp* internal_default_instance() { + return reinterpret_cast( + &_RegisterApp_default_instance_); + } + static constexpr int kIndexInFileMessages = + 1; + + void Swap(RegisterApp* other); + friend void swap(RegisterApp& a, RegisterApp& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline RegisterApp* New() const final { + return CreateMaybeMessage(nullptr); + } + + RegisterApp* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void CopyFrom(const RegisterApp& from); + void MergeFrom(const RegisterApp& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; + #else + bool MergePartialFromCodedStream( + 
::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; + ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + inline void SharedCtor(); + inline void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(RegisterApp* other); + friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; + static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { + return "ascend.presenter.facial_recognition.RegisterApp"; + } + private: + inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final; + private: + static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() { + ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_facial_5frecognition_5fmessage_2eproto); + return ::descriptor_table_facial_5frecognition_5fmessage_2eproto.file_level_metadata[kIndexInFileMessages]; + } + + public: + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // string id = 1; + void clear_id(); + static const int kIdFieldNumber = 1; + const std::string& id() const; + void set_id(const std::string& value); + void set_id(std::string&& value); + void set_id(const char* value); + void set_id(const char* value, size_t size); + std::string* mutable_id(); + std::string* release_id(); + void set_allocated_id(std::string* id); + + // string type = 2; + void clear_type(); + static const int kTypeFieldNumber = 2; + const std::string& type() const; + void set_type(const std::string& value); 
+ void set_type(std::string&& value); + void set_type(const char* value); + void set_type(const char* value, size_t size); + std::string* mutable_type(); + std::string* release_type(); + void set_allocated_type(std::string* type); + + // @@protoc_insertion_point(class_scope:ascend.presenter.facial_recognition.RegisterApp) + private: + class HasBitSetters; + + ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_; + ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr id_; + ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr type_; + mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; + friend struct ::TableStruct_facial_5frecognition_5fmessage_2eproto; +}; +// ------------------------------------------------------------------- + +class Box : + public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:ascend.presenter.facial_recognition.Box) */ { + public: + Box(); + virtual ~Box(); + + Box(const Box& from); + Box(Box&& from) noexcept + : Box() { + *this = ::std::move(from); + } + + inline Box& operator=(const Box& from) { + CopyFrom(from); + return *this; + } + inline Box& operator=(Box&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() { + return GetDescriptor(); + } + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() { + return GetMetadataStatic().descriptor; + } + static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() { + return GetMetadataStatic().reflection; + } + static const Box& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const Box* internal_default_instance() { + return reinterpret_cast( + &_Box_default_instance_); + } + static constexpr int kIndexInFileMessages = + 2; + + void Swap(Box* other); + friend void swap(Box& a, 
Box& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline Box* New() const final { + return CreateMaybeMessage(nullptr); + } + + Box* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void CopyFrom(const Box& from); + void MergeFrom(const Box& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; + #else + bool MergePartialFromCodedStream( + ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; + ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + inline void SharedCtor(); + inline void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(Box* other); + friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; + static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { + return "ascend.presenter.facial_recognition.Box"; + } + private: + inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final; + private: + static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() { + 
::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_facial_5frecognition_5fmessage_2eproto); + return ::descriptor_table_facial_5frecognition_5fmessage_2eproto.file_level_metadata[kIndexInFileMessages]; + } + + public: + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // uint32 lt_x = 1; + void clear_lt_x(); + static const int kLtXFieldNumber = 1; + ::PROTOBUF_NAMESPACE_ID::uint32 lt_x() const; + void set_lt_x(::PROTOBUF_NAMESPACE_ID::uint32 value); + + // uint32 lt_y = 2; + void clear_lt_y(); + static const int kLtYFieldNumber = 2; + ::PROTOBUF_NAMESPACE_ID::uint32 lt_y() const; + void set_lt_y(::PROTOBUF_NAMESPACE_ID::uint32 value); + + // uint32 rb_x = 3; + void clear_rb_x(); + static const int kRbXFieldNumber = 3; + ::PROTOBUF_NAMESPACE_ID::uint32 rb_x() const; + void set_rb_x(::PROTOBUF_NAMESPACE_ID::uint32 value); + + // uint32 rb_y = 4; + void clear_rb_y(); + static const int kRbYFieldNumber = 4; + ::PROTOBUF_NAMESPACE_ID::uint32 rb_y() const; + void set_rb_y(::PROTOBUF_NAMESPACE_ID::uint32 value); + + // @@protoc_insertion_point(class_scope:ascend.presenter.facial_recognition.Box) + private: + class HasBitSetters; + + ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_; + ::PROTOBUF_NAMESPACE_ID::uint32 lt_x_; + ::PROTOBUF_NAMESPACE_ID::uint32 lt_y_; + ::PROTOBUF_NAMESPACE_ID::uint32 rb_x_; + ::PROTOBUF_NAMESPACE_ID::uint32 rb_y_; + mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; + friend struct ::TableStruct_facial_5frecognition_5fmessage_2eproto; +}; +// ------------------------------------------------------------------- + +class FaceFeature : + public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:ascend.presenter.facial_recognition.FaceFeature) */ { + public: + FaceFeature(); + virtual ~FaceFeature(); + + FaceFeature(const FaceFeature& from); + 
FaceFeature(FaceFeature&& from) noexcept + : FaceFeature() { + *this = ::std::move(from); + } + + inline FaceFeature& operator=(const FaceFeature& from) { + CopyFrom(from); + return *this; + } + inline FaceFeature& operator=(FaceFeature&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() { + return GetDescriptor(); + } + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() { + return GetMetadataStatic().descriptor; + } + static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() { + return GetMetadataStatic().reflection; + } + static const FaceFeature& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const FaceFeature* internal_default_instance() { + return reinterpret_cast( + &_FaceFeature_default_instance_); + } + static constexpr int kIndexInFileMessages = + 3; + + void Swap(FaceFeature* other); + friend void swap(FaceFeature& a, FaceFeature& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline FaceFeature* New() const final { + return CreateMaybeMessage(nullptr); + } + + FaceFeature* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void CopyFrom(const FaceFeature& from); + void MergeFrom(const FaceFeature& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; + #else + bool MergePartialFromCodedStream( + 
::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; + ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + inline void SharedCtor(); + inline void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(FaceFeature* other); + friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; + static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { + return "ascend.presenter.facial_recognition.FaceFeature"; + } + private: + inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final; + private: + static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() { + ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_facial_5frecognition_5fmessage_2eproto); + return ::descriptor_table_facial_5frecognition_5fmessage_2eproto.file_level_metadata[kIndexInFileMessages]; + } + + public: + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // repeated float vector = 2; + int vector_size() const; + void clear_vector(); + static const int kVectorFieldNumber = 2; + float vector(int index) const; + void set_vector(int index, float value); + void add_vector(float value); + const ::PROTOBUF_NAMESPACE_ID::RepeatedField< float >& + vector() const; + ::PROTOBUF_NAMESPACE_ID::RepeatedField< float >* + mutable_vector(); + + // .ascend.presenter.facial_recognition.Box box = 1; + bool has_box() const; + void clear_box(); + static const int kBoxFieldNumber = 1; + const 
::ascend::presenter::facial_recognition::Box& box() const; + ::ascend::presenter::facial_recognition::Box* release_box(); + ::ascend::presenter::facial_recognition::Box* mutable_box(); + void set_allocated_box(::ascend::presenter::facial_recognition::Box* box); + + // @@protoc_insertion_point(class_scope:ascend.presenter.facial_recognition.FaceFeature) + private: + class HasBitSetters; + + ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_; + ::PROTOBUF_NAMESPACE_ID::RepeatedField< float > vector_; + mutable std::atomic _vector_cached_byte_size_; + ::ascend::presenter::facial_recognition::Box* box_; + mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; + friend struct ::TableStruct_facial_5frecognition_5fmessage_2eproto; +}; +// ------------------------------------------------------------------- + +class FaceInfo : + public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:ascend.presenter.facial_recognition.FaceInfo) */ { + public: + FaceInfo(); + virtual ~FaceInfo(); + + FaceInfo(const FaceInfo& from); + FaceInfo(FaceInfo&& from) noexcept + : FaceInfo() { + *this = ::std::move(from); + } + + inline FaceInfo& operator=(const FaceInfo& from) { + CopyFrom(from); + return *this; + } + inline FaceInfo& operator=(FaceInfo&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() { + return GetDescriptor(); + } + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() { + return GetMetadataStatic().descriptor; + } + static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() { + return GetMetadataStatic().reflection; + } + static const FaceInfo& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const FaceInfo* internal_default_instance() { + return 
reinterpret_cast( + &_FaceInfo_default_instance_); + } + static constexpr int kIndexInFileMessages = + 4; + + void Swap(FaceInfo* other); + friend void swap(FaceInfo& a, FaceInfo& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline FaceInfo* New() const final { + return CreateMaybeMessage(nullptr); + } + + FaceInfo* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void CopyFrom(const FaceInfo& from); + void MergeFrom(const FaceInfo& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; + #else + bool MergePartialFromCodedStream( + ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; + ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + inline void SharedCtor(); + inline void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(FaceInfo* other); + friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; + static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { + return "ascend.presenter.facial_recognition.FaceInfo"; + } + private: + inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::PROTOBUF_NAMESPACE_ID::Metadata 
GetMetadata() const final; + private: + static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() { + ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_facial_5frecognition_5fmessage_2eproto); + return ::descriptor_table_facial_5frecognition_5fmessage_2eproto.file_level_metadata[kIndexInFileMessages]; + } + + public: + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // string id = 1; + void clear_id(); + static const int kIdFieldNumber = 1; + const std::string& id() const; + void set_id(const std::string& value); + void set_id(std::string&& value); + void set_id(const char* value); + void set_id(const char* value, size_t size); + std::string* mutable_id(); + std::string* release_id(); + void set_allocated_id(std::string* id); + + // bytes image = 2; + void clear_image(); + static const int kImageFieldNumber = 2; + const std::string& image() const; + void set_image(const std::string& value); + void set_image(std::string&& value); + void set_image(const char* value); + void set_image(const void* value, size_t size); + std::string* mutable_image(); + std::string* release_image(); + void set_allocated_image(std::string* image); + + // @@protoc_insertion_point(class_scope:ascend.presenter.facial_recognition.FaceInfo) + private: + class HasBitSetters; + + ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_; + ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr id_; + ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr image_; + mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; + friend struct ::TableStruct_facial_5frecognition_5fmessage_2eproto; +}; +// ------------------------------------------------------------------- + +class FaceResult : + public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:ascend.presenter.facial_recognition.FaceResult) */ { + public: + FaceResult(); 
+ virtual ~FaceResult(); + + FaceResult(const FaceResult& from); + FaceResult(FaceResult&& from) noexcept + : FaceResult() { + *this = ::std::move(from); + } + + inline FaceResult& operator=(const FaceResult& from) { + CopyFrom(from); + return *this; + } + inline FaceResult& operator=(FaceResult&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() { + return GetDescriptor(); + } + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() { + return GetMetadataStatic().descriptor; + } + static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() { + return GetMetadataStatic().reflection; + } + static const FaceResult& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const FaceResult* internal_default_instance() { + return reinterpret_cast( + &_FaceResult_default_instance_); + } + static constexpr int kIndexInFileMessages = + 5; + + void Swap(FaceResult* other); + friend void swap(FaceResult& a, FaceResult& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline FaceResult* New() const final { + return CreateMaybeMessage(nullptr); + } + + FaceResult* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void CopyFrom(const FaceResult& from); + void MergeFrom(const FaceResult& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; + #else + bool 
MergePartialFromCodedStream( + ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; + ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + inline void SharedCtor(); + inline void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(FaceResult* other); + friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; + static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { + return "ascend.presenter.facial_recognition.FaceResult"; + } + private: + inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final; + private: + static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() { + ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_facial_5frecognition_5fmessage_2eproto); + return ::descriptor_table_facial_5frecognition_5fmessage_2eproto.file_level_metadata[kIndexInFileMessages]; + } + + public: + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // repeated .ascend.presenter.facial_recognition.FaceFeature feature = 3; + int feature_size() const; + void clear_feature(); + static const int kFeatureFieldNumber = 3; + ::ascend::presenter::facial_recognition::FaceFeature* mutable_feature(int index); + ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::ascend::presenter::facial_recognition::FaceFeature >* + mutable_feature(); + const ::ascend::presenter::facial_recognition::FaceFeature& feature(int index) const; + 
::ascend::presenter::facial_recognition::FaceFeature* add_feature(); + const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::ascend::presenter::facial_recognition::FaceFeature >& + feature() const; + + // string id = 1; + void clear_id(); + static const int kIdFieldNumber = 1; + const std::string& id() const; + void set_id(const std::string& value); + void set_id(std::string&& value); + void set_id(const char* value); + void set_id(const char* value, size_t size); + std::string* mutable_id(); + std::string* release_id(); + void set_allocated_id(std::string* id); + + // .ascend.presenter.facial_recognition.CommonResponse response = 2; + bool has_response() const; + void clear_response(); + static const int kResponseFieldNumber = 2; + const ::ascend::presenter::facial_recognition::CommonResponse& response() const; + ::ascend::presenter::facial_recognition::CommonResponse* release_response(); + ::ascend::presenter::facial_recognition::CommonResponse* mutable_response(); + void set_allocated_response(::ascend::presenter::facial_recognition::CommonResponse* response); + + // @@protoc_insertion_point(class_scope:ascend.presenter.facial_recognition.FaceResult) + private: + class HasBitSetters; + + ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_; + ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::ascend::presenter::facial_recognition::FaceFeature > feature_; + ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr id_; + ::ascend::presenter::facial_recognition::CommonResponse* response_; + mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; + friend struct ::TableStruct_facial_5frecognition_5fmessage_2eproto; +}; +// ------------------------------------------------------------------- + +class FrameInfo : + public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:ascend.presenter.facial_recognition.FrameInfo) */ { + public: + FrameInfo(); + virtual ~FrameInfo(); + + FrameInfo(const FrameInfo& from); + 
FrameInfo(FrameInfo&& from) noexcept + : FrameInfo() { + *this = ::std::move(from); + } + + inline FrameInfo& operator=(const FrameInfo& from) { + CopyFrom(from); + return *this; + } + inline FrameInfo& operator=(FrameInfo&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() { + return GetDescriptor(); + } + static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() { + return GetMetadataStatic().descriptor; + } + static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() { + return GetMetadataStatic().reflection; + } + static const FrameInfo& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const FrameInfo* internal_default_instance() { + return reinterpret_cast( + &_FrameInfo_default_instance_); + } + static constexpr int kIndexInFileMessages = + 6; + + void Swap(FrameInfo* other); + friend void swap(FrameInfo& a, FrameInfo& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline FrameInfo* New() const final { + return CreateMaybeMessage(nullptr); + } + + FrameInfo* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final; + void CopyFrom(const FrameInfo& from); + void MergeFrom(const FrameInfo& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final; + #else + bool MergePartialFromCodedStream( + ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final; + 
#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final; + ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray( + ::PROTOBUF_NAMESPACE_ID::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + inline void SharedCtor(); + inline void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(FrameInfo* other); + friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata; + static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() { + return "ascend.presenter.facial_recognition.FrameInfo"; + } + private: + inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final; + private: + static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() { + ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_facial_5frecognition_5fmessage_2eproto); + return ::descriptor_table_facial_5frecognition_5fmessage_2eproto.file_level_metadata[kIndexInFileMessages]; + } + + public: + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // repeated .ascend.presenter.facial_recognition.FaceFeature feature = 2; + int feature_size() const; + void clear_feature(); + static const int kFeatureFieldNumber = 2; + ::ascend::presenter::facial_recognition::FaceFeature* mutable_feature(int index); + ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::ascend::presenter::facial_recognition::FaceFeature >* + mutable_feature(); + const ::ascend::presenter::facial_recognition::FaceFeature& feature(int index) const; + ::ascend::presenter::facial_recognition::FaceFeature* add_feature(); + const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< 
::ascend::presenter::facial_recognition::FaceFeature >& + feature() const; + + // bytes image = 1; + void clear_image(); + static const int kImageFieldNumber = 1; + const std::string& image() const; + void set_image(const std::string& value); + void set_image(std::string&& value); + void set_image(const char* value); + void set_image(const void* value, size_t size); + std::string* mutable_image(); + std::string* release_image(); + void set_allocated_image(std::string* image); + + // @@protoc_insertion_point(class_scope:ascend.presenter.facial_recognition.FrameInfo) + private: + class HasBitSetters; + + ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_; + ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::ascend::presenter::facial_recognition::FaceFeature > feature_; + ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr image_; + mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; + friend struct ::TableStruct_facial_5frecognition_5fmessage_2eproto; +}; +// =================================================================== + + +// =================================================================== + +#ifdef __GNUC__ + #pragma GCC diagnostic push + #pragma GCC diagnostic ignored "-Wstrict-aliasing" +#endif // __GNUC__ +// CommonResponse + +// .ascend.presenter.facial_recognition.ErrorCode ret = 1; +inline void CommonResponse::clear_ret() { + ret_ = 0; +} +inline ::ascend::presenter::facial_recognition::ErrorCode CommonResponse::ret() const { + // @@protoc_insertion_point(field_get:ascend.presenter.facial_recognition.CommonResponse.ret) + return static_cast< ::ascend::presenter::facial_recognition::ErrorCode >(ret_); +} +inline void CommonResponse::set_ret(::ascend::presenter::facial_recognition::ErrorCode value) { + + ret_ = value; + // @@protoc_insertion_point(field_set:ascend.presenter.facial_recognition.CommonResponse.ret) +} + +// string message = 2; +inline void CommonResponse::clear_message() { + 
message_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline const std::string& CommonResponse::message() const { + // @@protoc_insertion_point(field_get:ascend.presenter.facial_recognition.CommonResponse.message) + return message_.GetNoArena(); +} +inline void CommonResponse::set_message(const std::string& value) { + + message_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:ascend.presenter.facial_recognition.CommonResponse.message) +} +inline void CommonResponse::set_message(std::string&& value) { + + message_.SetNoArena( + &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:ascend.presenter.facial_recognition.CommonResponse.message) +} +inline void CommonResponse::set_message(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + message_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:ascend.presenter.facial_recognition.CommonResponse.message) +} +inline void CommonResponse::set_message(const char* value, size_t size) { + + message_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:ascend.presenter.facial_recognition.CommonResponse.message) +} +inline std::string* CommonResponse::mutable_message() { + + // @@protoc_insertion_point(field_mutable:ascend.presenter.facial_recognition.CommonResponse.message) + return message_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline std::string* CommonResponse::release_message() { + // @@protoc_insertion_point(field_release:ascend.presenter.facial_recognition.CommonResponse.message) + + return 
message_.ReleaseNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline void CommonResponse::set_allocated_message(std::string* message) { + if (message != nullptr) { + + } else { + + } + message_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), message); + // @@protoc_insertion_point(field_set_allocated:ascend.presenter.facial_recognition.CommonResponse.message) +} + +// ------------------------------------------------------------------- + +// RegisterApp + +// string id = 1; +inline void RegisterApp::clear_id() { + id_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline const std::string& RegisterApp::id() const { + // @@protoc_insertion_point(field_get:ascend.presenter.facial_recognition.RegisterApp.id) + return id_.GetNoArena(); +} +inline void RegisterApp::set_id(const std::string& value) { + + id_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:ascend.presenter.facial_recognition.RegisterApp.id) +} +inline void RegisterApp::set_id(std::string&& value) { + + id_.SetNoArena( + &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:ascend.presenter.facial_recognition.RegisterApp.id) +} +inline void RegisterApp::set_id(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + id_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:ascend.presenter.facial_recognition.RegisterApp.id) +} +inline void RegisterApp::set_id(const char* value, size_t size) { + + id_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:ascend.presenter.facial_recognition.RegisterApp.id) +} +inline 
std::string* RegisterApp::mutable_id() { + + // @@protoc_insertion_point(field_mutable:ascend.presenter.facial_recognition.RegisterApp.id) + return id_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline std::string* RegisterApp::release_id() { + // @@protoc_insertion_point(field_release:ascend.presenter.facial_recognition.RegisterApp.id) + + return id_.ReleaseNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline void RegisterApp::set_allocated_id(std::string* id) { + if (id != nullptr) { + + } else { + + } + id_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), id); + // @@protoc_insertion_point(field_set_allocated:ascend.presenter.facial_recognition.RegisterApp.id) +} + +// string type = 2; +inline void RegisterApp::clear_type() { + type_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline const std::string& RegisterApp::type() const { + // @@protoc_insertion_point(field_get:ascend.presenter.facial_recognition.RegisterApp.type) + return type_.GetNoArena(); +} +inline void RegisterApp::set_type(const std::string& value) { + + type_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:ascend.presenter.facial_recognition.RegisterApp.type) +} +inline void RegisterApp::set_type(std::string&& value) { + + type_.SetNoArena( + &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:ascend.presenter.facial_recognition.RegisterApp.type) +} +inline void RegisterApp::set_type(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + type_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:ascend.presenter.facial_recognition.RegisterApp.type) +} +inline void 
RegisterApp::set_type(const char* value, size_t size) { + + type_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:ascend.presenter.facial_recognition.RegisterApp.type) +} +inline std::string* RegisterApp::mutable_type() { + + // @@protoc_insertion_point(field_mutable:ascend.presenter.facial_recognition.RegisterApp.type) + return type_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline std::string* RegisterApp::release_type() { + // @@protoc_insertion_point(field_release:ascend.presenter.facial_recognition.RegisterApp.type) + + return type_.ReleaseNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline void RegisterApp::set_allocated_type(std::string* type) { + if (type != nullptr) { + + } else { + + } + type_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), type); + // @@protoc_insertion_point(field_set_allocated:ascend.presenter.facial_recognition.RegisterApp.type) +} + +// ------------------------------------------------------------------- + +// Box + +// uint32 lt_x = 1; +inline void Box::clear_lt_x() { + lt_x_ = 0u; +} +inline ::PROTOBUF_NAMESPACE_ID::uint32 Box::lt_x() const { + // @@protoc_insertion_point(field_get:ascend.presenter.facial_recognition.Box.lt_x) + return lt_x_; +} +inline void Box::set_lt_x(::PROTOBUF_NAMESPACE_ID::uint32 value) { + + lt_x_ = value; + // @@protoc_insertion_point(field_set:ascend.presenter.facial_recognition.Box.lt_x) +} + +// uint32 lt_y = 2; +inline void Box::clear_lt_y() { + lt_y_ = 0u; +} +inline ::PROTOBUF_NAMESPACE_ID::uint32 Box::lt_y() const { + // @@protoc_insertion_point(field_get:ascend.presenter.facial_recognition.Box.lt_y) + return lt_y_; +} +inline void Box::set_lt_y(::PROTOBUF_NAMESPACE_ID::uint32 value) { + + lt_y_ = value; + // 
@@protoc_insertion_point(field_set:ascend.presenter.facial_recognition.Box.lt_y) +} + +// uint32 rb_x = 3; +inline void Box::clear_rb_x() { + rb_x_ = 0u; +} +inline ::PROTOBUF_NAMESPACE_ID::uint32 Box::rb_x() const { + // @@protoc_insertion_point(field_get:ascend.presenter.facial_recognition.Box.rb_x) + return rb_x_; +} +inline void Box::set_rb_x(::PROTOBUF_NAMESPACE_ID::uint32 value) { + + rb_x_ = value; + // @@protoc_insertion_point(field_set:ascend.presenter.facial_recognition.Box.rb_x) +} + +// uint32 rb_y = 4; +inline void Box::clear_rb_y() { + rb_y_ = 0u; +} +inline ::PROTOBUF_NAMESPACE_ID::uint32 Box::rb_y() const { + // @@protoc_insertion_point(field_get:ascend.presenter.facial_recognition.Box.rb_y) + return rb_y_; +} +inline void Box::set_rb_y(::PROTOBUF_NAMESPACE_ID::uint32 value) { + + rb_y_ = value; + // @@protoc_insertion_point(field_set:ascend.presenter.facial_recognition.Box.rb_y) +} + +// ------------------------------------------------------------------- + +// FaceFeature + +// .ascend.presenter.facial_recognition.Box box = 1; +inline bool FaceFeature::has_box() const { + return this != internal_default_instance() && box_ != nullptr; +} +inline void FaceFeature::clear_box() { + if (GetArenaNoVirtual() == nullptr && box_ != nullptr) { + delete box_; + } + box_ = nullptr; +} +inline const ::ascend::presenter::facial_recognition::Box& FaceFeature::box() const { + const ::ascend::presenter::facial_recognition::Box* p = box_; + // @@protoc_insertion_point(field_get:ascend.presenter.facial_recognition.FaceFeature.box) + return p != nullptr ? 
*p : *reinterpret_cast( + &::ascend::presenter::facial_recognition::_Box_default_instance_); +} +inline ::ascend::presenter::facial_recognition::Box* FaceFeature::release_box() { + // @@protoc_insertion_point(field_release:ascend.presenter.facial_recognition.FaceFeature.box) + + ::ascend::presenter::facial_recognition::Box* temp = box_; + box_ = nullptr; + return temp; +} +inline ::ascend::presenter::facial_recognition::Box* FaceFeature::mutable_box() { + + if (box_ == nullptr) { + auto* p = CreateMaybeMessage<::ascend::presenter::facial_recognition::Box>(GetArenaNoVirtual()); + box_ = p; + } + // @@protoc_insertion_point(field_mutable:ascend.presenter.facial_recognition.FaceFeature.box) + return box_; +} +inline void FaceFeature::set_allocated_box(::ascend::presenter::facial_recognition::Box* box) { + ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete box_; + } + if (box) { + ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; + if (message_arena != submessage_arena) { + box = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( + message_arena, box, submessage_arena); + } + + } else { + + } + box_ = box; + // @@protoc_insertion_point(field_set_allocated:ascend.presenter.facial_recognition.FaceFeature.box) +} + +// repeated float vector = 2; +inline int FaceFeature::vector_size() const { + return vector_.size(); +} +inline void FaceFeature::clear_vector() { + vector_.Clear(); +} +inline float FaceFeature::vector(int index) const { + // @@protoc_insertion_point(field_get:ascend.presenter.facial_recognition.FaceFeature.vector) + return vector_.Get(index); +} +inline void FaceFeature::set_vector(int index, float value) { + vector_.Set(index, value); + // @@protoc_insertion_point(field_set:ascend.presenter.facial_recognition.FaceFeature.vector) +} +inline void FaceFeature::add_vector(float value) { + vector_.Add(value); + // 
@@protoc_insertion_point(field_add:ascend.presenter.facial_recognition.FaceFeature.vector) +} +inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< float >& +FaceFeature::vector() const { + // @@protoc_insertion_point(field_list:ascend.presenter.facial_recognition.FaceFeature.vector) + return vector_; +} +inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< float >* +FaceFeature::mutable_vector() { + // @@protoc_insertion_point(field_mutable_list:ascend.presenter.facial_recognition.FaceFeature.vector) + return &vector_; +} + +// ------------------------------------------------------------------- + +// FaceInfo + +// string id = 1; +inline void FaceInfo::clear_id() { + id_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline const std::string& FaceInfo::id() const { + // @@protoc_insertion_point(field_get:ascend.presenter.facial_recognition.FaceInfo.id) + return id_.GetNoArena(); +} +inline void FaceInfo::set_id(const std::string& value) { + + id_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:ascend.presenter.facial_recognition.FaceInfo.id) +} +inline void FaceInfo::set_id(std::string&& value) { + + id_.SetNoArena( + &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:ascend.presenter.facial_recognition.FaceInfo.id) +} +inline void FaceInfo::set_id(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + id_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:ascend.presenter.facial_recognition.FaceInfo.id) +} +inline void FaceInfo::set_id(const char* value, size_t size) { + + id_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // 
@@protoc_insertion_point(field_set_pointer:ascend.presenter.facial_recognition.FaceInfo.id) +} +inline std::string* FaceInfo::mutable_id() { + + // @@protoc_insertion_point(field_mutable:ascend.presenter.facial_recognition.FaceInfo.id) + return id_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline std::string* FaceInfo::release_id() { + // @@protoc_insertion_point(field_release:ascend.presenter.facial_recognition.FaceInfo.id) + + return id_.ReleaseNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline void FaceInfo::set_allocated_id(std::string* id) { + if (id != nullptr) { + + } else { + + } + id_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), id); + // @@protoc_insertion_point(field_set_allocated:ascend.presenter.facial_recognition.FaceInfo.id) +} + +// bytes image = 2; +inline void FaceInfo::clear_image() { + image_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline const std::string& FaceInfo::image() const { + // @@protoc_insertion_point(field_get:ascend.presenter.facial_recognition.FaceInfo.image) + return image_.GetNoArena(); +} +inline void FaceInfo::set_image(const std::string& value) { + + image_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:ascend.presenter.facial_recognition.FaceInfo.image) +} +inline void FaceInfo::set_image(std::string&& value) { + + image_.SetNoArena( + &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:ascend.presenter.facial_recognition.FaceInfo.image) +} +inline void FaceInfo::set_image(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + image_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // 
@@protoc_insertion_point(field_set_char:ascend.presenter.facial_recognition.FaceInfo.image) +} +inline void FaceInfo::set_image(const void* value, size_t size) { + + image_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:ascend.presenter.facial_recognition.FaceInfo.image) +} +inline std::string* FaceInfo::mutable_image() { + + // @@protoc_insertion_point(field_mutable:ascend.presenter.facial_recognition.FaceInfo.image) + return image_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline std::string* FaceInfo::release_image() { + // @@protoc_insertion_point(field_release:ascend.presenter.facial_recognition.FaceInfo.image) + + return image_.ReleaseNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline void FaceInfo::set_allocated_image(std::string* image) { + if (image != nullptr) { + + } else { + + } + image_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), image); + // @@protoc_insertion_point(field_set_allocated:ascend.presenter.facial_recognition.FaceInfo.image) +} + +// ------------------------------------------------------------------- + +// FaceResult + +// string id = 1; +inline void FaceResult::clear_id() { + id_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline const std::string& FaceResult::id() const { + // @@protoc_insertion_point(field_get:ascend.presenter.facial_recognition.FaceResult.id) + return id_.GetNoArena(); +} +inline void FaceResult::set_id(const std::string& value) { + + id_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:ascend.presenter.facial_recognition.FaceResult.id) +} +inline void FaceResult::set_id(std::string&& value) { + + id_.SetNoArena( + 
&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:ascend.presenter.facial_recognition.FaceResult.id) +} +inline void FaceResult::set_id(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + id_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:ascend.presenter.facial_recognition.FaceResult.id) +} +inline void FaceResult::set_id(const char* value, size_t size) { + + id_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:ascend.presenter.facial_recognition.FaceResult.id) +} +inline std::string* FaceResult::mutable_id() { + + // @@protoc_insertion_point(field_mutable:ascend.presenter.facial_recognition.FaceResult.id) + return id_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline std::string* FaceResult::release_id() { + // @@protoc_insertion_point(field_release:ascend.presenter.facial_recognition.FaceResult.id) + + return id_.ReleaseNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline void FaceResult::set_allocated_id(std::string* id) { + if (id != nullptr) { + + } else { + + } + id_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), id); + // @@protoc_insertion_point(field_set_allocated:ascend.presenter.facial_recognition.FaceResult.id) +} + +// .ascend.presenter.facial_recognition.CommonResponse response = 2; +inline bool FaceResult::has_response() const { + return this != internal_default_instance() && response_ != nullptr; +} +inline void FaceResult::clear_response() { + if (GetArenaNoVirtual() == nullptr && response_ != nullptr) { + delete response_; + } + response_ = nullptr; +} +inline const 
::ascend::presenter::facial_recognition::CommonResponse& FaceResult::response() const { + const ::ascend::presenter::facial_recognition::CommonResponse* p = response_; + // @@protoc_insertion_point(field_get:ascend.presenter.facial_recognition.FaceResult.response) + return p != nullptr ? *p : *reinterpret_cast( + &::ascend::presenter::facial_recognition::_CommonResponse_default_instance_); +} +inline ::ascend::presenter::facial_recognition::CommonResponse* FaceResult::release_response() { + // @@protoc_insertion_point(field_release:ascend.presenter.facial_recognition.FaceResult.response) + + ::ascend::presenter::facial_recognition::CommonResponse* temp = response_; + response_ = nullptr; + return temp; +} +inline ::ascend::presenter::facial_recognition::CommonResponse* FaceResult::mutable_response() { + + if (response_ == nullptr) { + auto* p = CreateMaybeMessage<::ascend::presenter::facial_recognition::CommonResponse>(GetArenaNoVirtual()); + response_ = p; + } + // @@protoc_insertion_point(field_mutable:ascend.presenter.facial_recognition.FaceResult.response) + return response_; +} +inline void FaceResult::set_allocated_response(::ascend::presenter::facial_recognition::CommonResponse* response) { + ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete response_; + } + if (response) { + ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr; + if (message_arena != submessage_arena) { + response = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage( + message_arena, response, submessage_arena); + } + + } else { + + } + response_ = response; + // @@protoc_insertion_point(field_set_allocated:ascend.presenter.facial_recognition.FaceResult.response) +} + +// repeated .ascend.presenter.facial_recognition.FaceFeature feature = 3; +inline int FaceResult::feature_size() const { + return feature_.size(); +} +inline void FaceResult::clear_feature() { + feature_.Clear(); +} +inline 
::ascend::presenter::facial_recognition::FaceFeature* FaceResult::mutable_feature(int index) { + // @@protoc_insertion_point(field_mutable:ascend.presenter.facial_recognition.FaceResult.feature) + return feature_.Mutable(index); +} +inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::ascend::presenter::facial_recognition::FaceFeature >* +FaceResult::mutable_feature() { + // @@protoc_insertion_point(field_mutable_list:ascend.presenter.facial_recognition.FaceResult.feature) + return &feature_; +} +inline const ::ascend::presenter::facial_recognition::FaceFeature& FaceResult::feature(int index) const { + // @@protoc_insertion_point(field_get:ascend.presenter.facial_recognition.FaceResult.feature) + return feature_.Get(index); +} +inline ::ascend::presenter::facial_recognition::FaceFeature* FaceResult::add_feature() { + // @@protoc_insertion_point(field_add:ascend.presenter.facial_recognition.FaceResult.feature) + return feature_.Add(); +} +inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::ascend::presenter::facial_recognition::FaceFeature >& +FaceResult::feature() const { + // @@protoc_insertion_point(field_list:ascend.presenter.facial_recognition.FaceResult.feature) + return feature_; +} + +// ------------------------------------------------------------------- + +// FrameInfo + +// bytes image = 1; +inline void FrameInfo::clear_image() { + image_.ClearToEmptyNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline const std::string& FrameInfo::image() const { + // @@protoc_insertion_point(field_get:ascend.presenter.facial_recognition.FrameInfo.image) + return image_.GetNoArena(); +} +inline void FrameInfo::set_image(const std::string& value) { + + image_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:ascend.presenter.facial_recognition.FrameInfo.image) +} +inline void FrameInfo::set_image(std::string&& value) { + + image_.SetNoArena( + 
&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:ascend.presenter.facial_recognition.FrameInfo.image) +} +inline void FrameInfo::set_image(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + image_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:ascend.presenter.facial_recognition.FrameInfo.image) +} +inline void FrameInfo::set_image(const void* value, size_t size) { + + image_.SetNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:ascend.presenter.facial_recognition.FrameInfo.image) +} +inline std::string* FrameInfo::mutable_image() { + + // @@protoc_insertion_point(field_mutable:ascend.presenter.facial_recognition.FrameInfo.image) + return image_.MutableNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline std::string* FrameInfo::release_image() { + // @@protoc_insertion_point(field_release:ascend.presenter.facial_recognition.FrameInfo.image) + + return image_.ReleaseNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited()); +} +inline void FrameInfo::set_allocated_image(std::string* image) { + if (image != nullptr) { + + } else { + + } + image_.SetAllocatedNoArena(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), image); + // @@protoc_insertion_point(field_set_allocated:ascend.presenter.facial_recognition.FrameInfo.image) +} + +// repeated .ascend.presenter.facial_recognition.FaceFeature feature = 2; +inline int FrameInfo::feature_size() const { + return feature_.size(); +} +inline void FrameInfo::clear_feature() { + feature_.Clear(); +} +inline ::ascend::presenter::facial_recognition::FaceFeature* FrameInfo::mutable_feature(int index) { + // 
@@protoc_insertion_point(field_mutable:ascend.presenter.facial_recognition.FrameInfo.feature) + return feature_.Mutable(index); +} +inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::ascend::presenter::facial_recognition::FaceFeature >* +FrameInfo::mutable_feature() { + // @@protoc_insertion_point(field_mutable_list:ascend.presenter.facial_recognition.FrameInfo.feature) + return &feature_; +} +inline const ::ascend::presenter::facial_recognition::FaceFeature& FrameInfo::feature(int index) const { + // @@protoc_insertion_point(field_get:ascend.presenter.facial_recognition.FrameInfo.feature) + return feature_.Get(index); +} +inline ::ascend::presenter::facial_recognition::FaceFeature* FrameInfo::add_feature() { + // @@protoc_insertion_point(field_add:ascend.presenter.facial_recognition.FrameInfo.feature) + return feature_.Add(); +} +inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::ascend::presenter::facial_recognition::FaceFeature >& +FrameInfo::feature() const { + // @@protoc_insertion_point(field_list:ascend.presenter.facial_recognition.FrameInfo.feature) + return feature_; +} + +#ifdef __GNUC__ + #pragma GCC diagnostic pop +#endif // __GNUC__ +// ------------------------------------------------------------------- + +// ------------------------------------------------------------------- + +// ------------------------------------------------------------------- + +// ------------------------------------------------------------------- + +// ------------------------------------------------------------------- + +// ------------------------------------------------------------------- + + +// @@protoc_insertion_point(namespace_scope) + +} // namespace facial_recognition +} // namespace presenter +} // namespace ascend + +PROTOBUF_NAMESPACE_OPEN + +template <> struct is_proto_enum< ::ascend::presenter::facial_recognition::ErrorCode> : ::std::true_type {}; +template <> +inline const EnumDescriptor* GetEnumDescriptor< 
::ascend::presenter::facial_recognition::ErrorCode>() { + return ::ascend::presenter::facial_recognition::ErrorCode_descriptor(); +} + +PROTOBUF_NAMESPACE_CLOSE + +// @@protoc_insertion_point(global_scope) + +#include +#endif // GOOGLE_PROTOBUF_INCLUDED_GOOGLE_PROTOBUF_INCLUDED_facial_5frecognition_5fmessage_2eproto diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/facial_thread_base.h b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/facial_thread_base.h new file mode 100644 index 0000000000000000000000000000000000000000..ebf5a5f554d68a08a212c3c81c6b224ead4b3f8e --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/facial_thread_base.h @@ -0,0 +1,198 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018-2020, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * ============================================================================ + */ + +#ifndef FACILA_THREAD_BASE_H +#define FACILA_THREAD_BASE_H + +#include +#include +#include +#include +#include + +#include "atlasutil/atlas_thread.h" +#include "atlasutil/atlas_app.h" +#include "atlasutil/atlas_utils.h" +#include "atlasutil/parse_config.h" +#include "face_recognition_params.h" + + +class FacialThreadBase : public AtlasThread { +public: + /** + * @brief: constructor + * @param [in]: configFile: App config file + */ + FacialThreadBase(const std::string& configFile) : + configFile_(configFile), + modelWidth_(0), + modelHeight_(0), + modelPath_(""), + nextThreadId_(INVALID_INSTANCE_ID) { + GetBaseConfig(); + }; + + /** + * @brief: destruction function + */ + ~FacialThreadBase() {}; + + /** + * @brief: Thread init function, override AtlasThread init method + * @param [in]: None + * @return: Init result + * ATLAS_OK: Init success + * ATLAS_ERROR: Init failed + */ + virtual AtlasError Init() { return ATLAS_OK; } + + /** + * @brief: The message process entry of thread received + * @param [in]: msgId: The received message id + * @param [in]: msgData: The received message data + * @return: Message process result + * ATLAS_OK: process success + * ATLAS_ERROR: process failed, the thread will exit + */ + virtual AtlasError Process(int msgId, std::shared_ptr msgData) { + return ATLAS_OK; + } + + /** + * @brief: Send message to next thread + * 
@param [in]: msgId: message id + * @param [in]: data: message data + * @return: Message process result + * ATLAS_OK: process success + * ATLAS_ERROR: process failed, the thread will exit + */ + AtlasError SendMessageToNext(int msgId, std::shared_ptr data) { + return SendMessage(NextThreadId(), msgId, data); + } + + /** + * @brief: Get model width of current thread configuration.The config + * is in app config file with format .model_width + * @return: 0:No model width config or read config failed + * > 0: model width + */ + uint32_t ModelWidth() { return modelWidth_; } + + /** + * @brief: Get model height of current thread configuration.The config + * is in app config file with format .model_height + * @return: 0:No model height config or read config failed + * > 0: model height + */ + uint32_t ModelHeight() { return modelHeight_; } + + /** + * @brief: Get model path of current thread configuration.The config + * is in app config file with format .model_path + * @return: "":No model path config or read config failed + * other: model path + */ + const std::string& ModelPath() { return modelPath_; } + + /** + * @brief: Get next thread id of current thread.The next thread + * is in app config file with format .next_thread + * @return: -1:No next thread config or read config failed + * >= 1: next thread id + */ + int NextThreadId() { + if ((nextThreadId_ == INVALID_INSTANCE_ID) && + (nextThreadName_.size() > 0)) { + nextThreadId_ = GetAtlasThreadIdByName(nextThreadName_); + ATLAS_LOG_INFO("%s: next thread name %s, id %d", + SelfInstanceName().c_str(), + nextThreadName_.c_str(), nextThreadId_); + } + + return nextThreadId_; + } + + /** + * @brief: Get thread config, include model_width, model_height, + * model_path, next_thread + * @return: ATLAS_OK:No config or get config success + * ATLAS_ERROR: read config file failed + */ + AtlasError GetBaseConfig() { + const std::string& selfName = SelfInstanceName(); + + std::string modelWidthKey = selfName + ".model_width"; + 
std::string modelHeightKey = selfName + ".model_height"; + std::string modelPahtKey = selfName + ".model_path"; + std::string nextThreadKey = selfName + ".next_thread"; + + std::map config; + if(!ReadConfig(config, configFile_.c_str())) { + ATLAS_LOG_ERROR("Read config %s failed", configFile_.c_str()); + return ATLAS_ERROR; + } + + std::map::const_iterator mIter = config.begin(); + for (; mIter != config.end(); ++mIter) { + if (mIter->first == modelWidthKey) { + modelWidth_ = atoi(mIter->second.c_str()); + ATLAS_LOG_INFO("%s: model width %d", + SelfInstanceName().c_str(), modelWidth_); + } else if (mIter->first == modelHeightKey) { + modelHeight_ = atoi(mIter->second.c_str()); + ATLAS_LOG_INFO("%s: model height %d", + SelfInstanceName().c_str(), modelHeight_); + } else if (mIter->first == modelPahtKey) { + modelPath_.assign(mIter->second.c_str()); + ATLAS_LOG_INFO("%s: model path: %s", + SelfInstanceName().c_str(), modelPath_.c_str()); + } else if (mIter->first == nextThreadKey) { + nextThreadName_.assign(mIter->second.c_str()); + ATLAS_LOG_INFO("%s: next thread name %s", + SelfInstanceName().c_str(), mIter->second.c_str()); + } + } + + return ATLAS_OK; + } + +private: + const std::string& configFile_; + std::string nextThreadName_; + uint32_t modelWidth_; + uint32_t modelHeight_; + std::string modelPath_; + int nextThreadId_; +}; + +#endif /* MindCamera_H */ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/mind_camera.h b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/mind_camera.h new file mode 100644 index 0000000000000000000000000000000000000000..30c874ef8ebd68262c147e9d37cc8740554a45c3 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/mind_camera.h @@ -0,0 +1,97 @@ +/** + * ============================================================================ + * + * Copyright (C) 
2018-2020, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#ifndef MIND_CAMERA_H +#define MIND_CAMERA_H + +#include +#include + +#include "atlasutil/atlas_videocapture.h" +#include "face_recognition_params.h" + +#include "facial_thread_base.h" + +/** + * @brief: Camera class + */ +class MindCamera : public FacialThreadBase { +public: + /** + * @brief: constructor + * @param [in]: configFile: App config file + */ + MindCamera(const std::string& configFile); + + /** + * @brief: destruction function + */ + ~MindCamera(); + + /** + * @brief: Mind camera thread init function + * @param [in]: None + * @return: Init result + * ATLAS_OK: Init success + * ATLAS_ERROR: Init failed, he thread will exit + */ + AtlasError Init(); + + /** + * @brief: The message process entry of mind camera thread received + * @param [in]: msgId: The received message id + * @param [in]: msgData: The received message data + * @return: Message process result + * ATLAS_OK: process success + * ATLAS_ERROR: process failed, he thread will exit + */ + AtlasError Process(int msgId, std::shared_ptr msgData); + +private: + /** + * @brief: Process MSG_READ_FRAME message + * @param [in]: None + * @return: Message process result + * ATLAS_OK: process success + * ATLAS_ERROR: process failed, he thread will exit + */ + AtlasError ReadFrameMsgProcess(); + +private: + //Mind camera self thread id + int selfThreadId_; + //Camera/video decode instance pointer + AtlasVideoCapture* cap_; +}; + +#endif /* MindCamera_H */ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/presenter_channels.h b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/presenter_channels.h new file mode 100644 index 0000000000000000000000000000000000000000..fcc8108b7941f44fbe92000ea821cdfca6d88f31 --- /dev/null +++ 
b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/inc/presenter_channels.h @@ -0,0 +1,186 @@ +/** + * ============================================================================ + * + * Copyright (C) 2018-2020, Hisilicon Technologies Co., Ltd. All Rights Reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1 Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2 Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3 Neither the names of the copyright holders nor the names of the + * contributors may be used to endorse or promote products derived from this + * software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * ============================================================================ + */ + +#ifndef PRESENTER_CHANNELS_H +#define PRESENTER_CHANNELS_H + +#include "facial_recognition_message.pb.h" + +#include +#include +#include +#include +#include + +#include "ascenddk/presenter/agent/presenter_types.h" +#include "ascenddk/presenter/agent/channel.h" +#include "ascenddk/presenter/agent/presenter_channel.h" + +#define COMMENT_CHAR '#' +#define EQUALS_CHAR '=' +#define BLANK_SPACE_CHAR ' ' +#define TABLE_CHAR '\t' + +struct PresenterServerParams { + // ip of presenter server + std::string hostIp; + // port of presenter server + std::uint16_t port; + // name of registered app + std::string appId; + // type of registered app + std::string appType; +}; + +class PresenterChannels { +public: + /** + * @brief: Get global unique PresenterChannels instance + * @return: PresenterChannels instance + */ + static PresenterChannels& GetInstance() { + static PresenterChannels instance; + return instance; + } + + /** + * @brief: Init presenter server connection parameters + * @param [in]: connection parameters + * @return: None + */ + void Init(const PresenterServerParams& param) { + param_ = param; + } + + /** + * @brief: Get face register channel handle, if the channel is + * not exist, create it + * @return: face register channel between app and presenter server + */ + ascend::presenter::Channel* GetChannel() { + if (intfChannel_ != nullptr) { + return intfChannel_.get(); + } + + // create agent channel by hostIp and port + ascend::presenter::ChannelFactory channelFactory; + ascend::presenter::Channel *agentChannel = channelFactory.NewChannel( + param_.hostIp, param_.port); + + //open present channel + ascend::presenter::PresenterErrorCode presentOpenErr = + agentChannel->Open(); + if (presentOpenErr != ascend::presenter::PresenterErrorCode::kNone) { + return nullptr; + } + + // register app to presenter server + ascend::presenter::facial_recognition::RegisterApp appRegister; + 
appRegister.set_id(param_.appId); + appRegister.set_type(param_.appType); + + // construct responded protobuf Message + std::unique_ptr < google::protobuf::Message > response; + + // send registered request to server + ascend::presenter::PresenterErrorCode presentRegisterErr = agentChannel + ->SendMessage(appRegister, response); + if (presentRegisterErr != ascend::presenter::PresenterErrorCode::kNone) { + return nullptr; + } + + // get responded Message and judge result + ascend::presenter::facial_recognition::CommonResponse* registerResponse = + dynamic_cast(response + .get()); + if (registerResponse == nullptr) { + return nullptr; + } + ascend::presenter::facial_recognition::ErrorCode registerErr = + registerResponse->ret(); + if (registerErr != ascend::presenter::facial_recognition::kErrorNone) { + return nullptr; + } + + intfChannel_.reset(agentChannel); + + return intfChannel_.get(); + } + + /** + * @brief: Get connection channel between presenter server and post process + * thread, ifnot exist, create it + * @return: presenter server channel + */ + ascend::presenter::Channel* GetPresenterChannel() { + // channel already exist, return it + if (presenterChannel_ != nullptr) { + return presenterChannel_.get(); + } + + // channel not exist, open it + ascend::presenter::Channel *ch = nullptr; + ascend::presenter::OpenChannelParam param; + param.host_ip = param_.hostIp; + param.port = param_.port; + param.channel_name = param_.appId; + param.content_type = ascend::presenter::ContentType::kVideo; + + ascend::presenter::PresenterErrorCode error_code = + ascend::presenter::OpenChannel(ch, param); + + // open channel failed + if (error_code != ascend::presenter::PresenterErrorCode::kNone) { + //ERROR_LOG("Open channel failed! 
%d", + // error_code); + return nullptr; + } + + // open channel successfully, set it to private parameter + presenterChannel_.reset(ch); + return presenterChannel_.get(); + } + +private: + + // intf channel for face register + std::unique_ptr intfChannel_; + + // presenter channel for camera data + std::unique_ptr presenterChannel_; + + // channel params + PresenterServerParams param_; +}; + +#endif diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/model/.keep b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/model/.keep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/main b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/main new file mode 100644 index 0000000000000000000000000000000000000000..1e227bc4ed5c0d9aa7329b81b7d148f7c1f6fbe4 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/main differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/param.conf b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/param.conf new file mode 100644 index 0000000000000000000000000000000000000000..b449d49df20dd7ce8f52f61670889dc0be8d6cda --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/out/param.conf @@ -0,0 +1,40 @@ +[baseconf] +# A socket server address to communicate with presenter agent +presenter_server_ip=192.168.1.223 + +# The port of presenter agent and server communicate with +presenter_server_port=7008 + +# the ip in presenter server view web url +presenter_view_ip=192.168.1.223 + +# view 
entry label in presenter server view web +channel_name=face_recognition + +#the data type that send to presenter server from agent, 0:image, 1:video +content_type=1 + +mind_camera.thread_name=mind_camera +mind_camera.next_thread=face_detect + +face_register.thread_name=face_register +face_register.next_thread=face_detect + +face_detect.model_width=304 +face_detect.model_height=300 +face_detect.model_path=../model/face_detection.om +face_detect.thread_name=face_detect +face_detect.next_thread=face_feature_mask + +face_feature_mask.model_width=40 +face_feature_mask.model_height=40 +face_feature_mask.model_path=../model/vanillacnn.om +face_feature_mask.thread_name=face_feature_mask +face_feature_mask.next_thread=face_recognition + +face_recognition.model_width=96 +face_recognition.model_height=112 +face_recognition.model_path=../model/sphereface.om +face_recognition.thread_name=face_recognition +face_recognition.next_thread=face_post_process + diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/README b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/README new file mode 100644 index 0000000000000000000000000000000000000000..f151a4558cc2039569f61eb67aec1c7beb83cdd6 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/README @@ -0,0 +1,15 @@ +First, you must ensure "python3.5" is install in your ubuntu environment. + +# Install the dependency package. +# First go to the presenter server directory +1. sudo pip3 install -r requirements + +# Configure your network information. +# Modify presenter_server_ip to the IP address of the private network that can be accessed by the development board. +# Modify web_server_ip to the IP address that your Chrome browser can access. +2. 
sudo vim facial_recognition/config/config.conf + +# Go to the source code directory and run the python program. +3. python3.5 presenter_server.py --app=facial_recognition + +4. Open your browser(only support Chrome now), and visit the website, for example: http:192.168.1.100:7009 diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/__init__.py b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__init__.py b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/__init__.cpython-36.pyc b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..24bf4592bdd7a9d7edd39549ffdbe6e624b4b4d7 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/__init__.cpython-36.pyc differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/app_manager.cpython-36.pyc b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/app_manager.cpython-36.pyc new file 
mode 100644 index 0000000000000000000000000000000000000000..5906ce548b04ad32fd783b50da9e9f6c11ffed99 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/app_manager.cpython-36.pyc differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/channel_handler.cpython-36.pyc b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/channel_handler.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..952bb64773a933472b4e598d08fe60b20fa933fe Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/channel_handler.cpython-36.pyc differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/channel_manager.cpython-36.pyc b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/channel_manager.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7cb165af7ef9208c7f3b82c6010e27d49b3d85e7 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/channel_manager.cpython-36.pyc differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/parameter_validation.cpython-36.pyc b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/parameter_validation.cpython-36.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..6dd3b6755fcb6059fec2eaa927fac224f1354a4a Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/parameter_validation.cpython-36.pyc differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/presenter_message_pb2.cpython-36.pyc b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/presenter_message_pb2.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c4f8fe2df0652969b0a795ea162ccea3ecfb0e20 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/presenter_message_pb2.cpython-36.pyc differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/presenter_socket_server.cpython-36.pyc b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/presenter_socket_server.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..40bbc1bbca78b197d940221d148383808db9a60c Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/__pycache__/presenter_socket_server.cpython-36.pyc differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/app_manager.py b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/app_manager.py new file mode 100644 index 
0000000000000000000000000000000000000000..a953494170d5f9ff9b616910ad20669c47f06acf --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/app_manager.py @@ -0,0 +1,209 @@ +# ======================================================================= +# +# Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1 Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2 Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3 Neither the names of the copyright holders nor the names of the +# contributors may be used to endorse or promote products derived from this +# software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# ======================================================================= +# +"""presenter app manager module""" + +import time +import threading +import logging +from common.channel_manager import ChannelManager + +# Heartbeat timeout, exceeding the limit, the socket will disconnect +HEARTBEAT_TIMEOUT = 100 + +class App(): + '''App class, When receive an app request from + Presenter Agent, creat an object. + ''' + def __init__(self, app_id, conn=None): + self.app_id = app_id + self.heartbeat = time.time() + self.socket_fd = conn.fileno() + # set timeout 1 second + conn.settimeout(1) + self.socket = conn + self.frame_num_dict = {} + +class AppManager(): + '''A class provides app management features''' + __instance = None + channel_manager = None + app_list_lock = threading.Lock() + app_list = [] + thread_switch = False + + def __init__(self): + """init func""" + + + def __new__(cls): + """ensure only a single instance created. """ + if cls.__instance is None: + cls.__instance = object.__new__(cls) + cls.channel_manager = ChannelManager([]) + cls._create_thread() + return cls.__instance + + @classmethod + def _create_thread(cls): + """_create_thread.""" + + thread = threading.Thread(target=cls._app_thread) + thread.start() + + @classmethod + def _app_thread(cls): + """background thread to process video""" + logging.info('create app manager thread') + while True: + if cls.thread_switch: + break + for i in range(len(cls.app_list)): + if time.time() - cls.app_list[i].heartbeat > HEARTBEAT_TIMEOUT: + app_id = cls.app_list[i].app_id + cls.channel_manager.unregister_one_channel(app_id) + del cls.app_list[i] + logging.info("unregister app: %s", app_id) + time.sleep(1) + + def set_thread_switch(self): + AppManager.thread_switch = True + + def register_app(self, app_id, socket): + """ + API for registering an app + Args: + app_id: app id, must be globally unique + socket: a socket communicating with the app + """ + with self.app_list_lock: + for i in 
range(len(self.app_list)): + if self.app_list[i].app_id == app_id: + return False + + app = App(app_id, socket) + self.app_list.append(app) + self.channel_manager.register_one_channel(app_id) + logging.info("register app: %s", app_id) + return True + + def unregister_app_by_fd(self, sock_fileno): + """ + API for unregistering an app + Args: + sock_fileno: sock_fileno is binded to an app. + Through it, find the app and delete it. + """ + with self.app_list_lock: + for i in range(len(self.app_list)): + if self.app_list[i].socket_fd == sock_fileno: + app_id = self.app_list[i].app_id + self.channel_manager.unregister_one_channel(app_id) + del self.app_list[i] + logging.info("unregister app: %s", app_id) + break + + def get_socket_by_app_id(self, app_id): + """ + API for finding an app + Args: + app_id: the id of an app. + """ + with self.app_list_lock: + for i in range(len(self.app_list)): + if self.app_list[i].app_id == app_id: + return self.app_list[i].socket + return None + + def get_app_id_by_socket(self, sock_fd): + """ + API for get app id by socket + Args: + sock_fd: sock_fd is binded to an app. + Through it, find the app and delete it. + """ + with self.app_list_lock: + for i in range(len(self.app_list)): + if self.app_list[i].socket_fd == sock_fd: + return self.app_list[i].app_id + return None + + def is_app_exist(self, app_id): + """ + API for checking if the app exist + Args: + app_id: the id of an app. 
+ """ + with self.app_list_lock: + for i in range(len(self.app_list)): + if self.app_list[i].app_id == app_id: + return True + return False + + def get_app_num(self): + """ + API for getting the number of apps + Args: NA + """ + with self.app_list_lock: + return len(self.app_list) + + def set_heartbeat(self, sock_fileno): + with self.app_list_lock: + for i in range(len(self.app_list)): + if self.app_list[i].socket_fd == sock_fileno: + self.app_list[i].heartbeat = time.time() + + def increase_frame_num(self, app_id, channel_id): + with self.app_list_lock: + for i in range(len(self.app_list)): + if self.app_list[i].app_id == app_id: + if channel_id in self.app_list[i].frame_num_dict: + self.app_list[i].frame_num_dict[channel_id] += 1 + else: + self.app_list[i].frame_num_dict[channel_id] = 1 + + def get_frame_num(self, app_id, channel_id): + with self.app_list_lock: + for i in range(len(self.app_list)): + if self.app_list[i].app_id == app_id: + if channel_id in self.app_list[i].frame_num_dict: + return self.app_list[i].frame_num_dict[channel_id] + else: + return 0 + return 0 + def list_app(self): + """ + API for listing all apps + Args: NA + """ + with self.app_list_lock: + return [self.app_list[i].app_id for i in range(len(self.app_list))] diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/channel_handler.py b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/channel_handler.py new file mode 100644 index 0000000000000000000000000000000000000000..d7fa5d4fb2db9fd36cd80a6be69a33f5d3256f3e --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/channel_handler.py @@ -0,0 +1,227 @@ +# ======================================================================= +# +# Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1 Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2 Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3 Neither the names of the copyright holders nor the names of the +# contributors may be used to endorse or promote products derived from this +# software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# ======================================================================= +# + +"""presenter channel manager module""" + +import time +import logging +import threading +from threading import get_ident +from common.channel_manager import ChannelManager + +# thread event timeout, The unit is second. +WEB_EVENT_TIMEOUT = 2 +# thread event timeout, The unit is second. +IMAGE_EVENT_TIMEOUT = 10 + +# heart beat timeout, The unit is second. 
+HEARTBEAT_TIMEOUT = 100 + +class ThreadEvent(): + """An Event-like class that signals all active clients when a new frame is + available. + """ + def __init__(self, timeout=None): + self.events = {} + self.timeout = timeout + + def wait(self): + """Invoked from each client's thread to wait for the next frame.""" + ident = get_ident() + if ident not in self.events: + # this is a new client + # add an entry for it in the self.events dict + # each entry has two elements, a threading.Event() and a timestamp + self.events[ident] = [threading.Event(), time.time()] + return self.events[ident][0].wait(self.timeout) + + def set(self): + """Invoked by the camera thread when a new frame is available.""" + now = time.time() + remove = None + for ident, event in self.events.items(): + if not event[0].isSet(): + # if this client's event is not set, then set it + # also update the last set timestamp to now + event[0].set() + event[1] = now + else: + # if the client's event is already set, it means the client + # did not process a previous frame + # if the event stays set for more than 5 seconds, then assume + # the client is gone and remove it + if now - event[1] > 5: + remove = ident + if remove: + del self.events[remove] + + def clear(self): + """Invoked from each client's thread after a frame was processed.""" + self.events[get_ident()][0].clear() + +class ChannelHandler(): + """A set of channel handlers, process data received from channel""" + def __init__(self, channel_name, media_type): + self.channel_name = channel_name + self.media_type = media_type + self.img_data = None + self._frame = None + self.thread = None + self._frame = None + # last time the channel receive data. 
+ self.heartbeat = time.time() + self.web_event = ThreadEvent(timeout=WEB_EVENT_TIMEOUT) + self.image_event = ThreadEvent(timeout=IMAGE_EVENT_TIMEOUT) + self.lock = threading.Lock() + self.channel_manager = ChannelManager([]) + self.rectangle_list = None + + if media_type == "video": + self.thread_name = "videothread-{}".format(self.channel_name) + self.heartbeat = time.time() + self.close_thread_switch = False + self.fps = 0 + self.image_number = 0 + self.time_list = [] + self._create_thread() + + def close_thread(self): + """close thread if object has created""" + if self.thread is None: + return + + self.set_thread_switch() + self.image_event.set() + logging.info("%s set _close_thread_switch True", self.thread_name) + + def set_heartbeat(self): + """record heartbeat""" + self.heartbeat = time.time() + + def set_thread_switch(self): + """record heartbeat""" + self.close_thread_switch = True + + def save_image(self, data, width, height, rectangle_list): + """save image receive from socket""" + self.width = width + self.height = height + self.rectangle_list = rectangle_list + + # compute fps if type is video + if self.media_type == "video": + while self.img_data: + time.sleep(0.01) + + self.time_list.append(self.heartbeat) + self.image_number += 1 + while self.time_list[0] + 1 < time.time(): + self.time_list.pop(0) + self.image_number -= 1 + if self.image_number == 0: + break + + self.fps = len(self.time_list) + self.img_data = data + self.image_event.set() + else: + self.img_data = data + self.channel_manager.save_channel_image(self.channel_name, + self.img_data, self.rectangle_list) + + self.heartbeat = time.time() + + + def get_media_type(self): + """get media_type, support image or video""" + return self.media_type + + def get_image(self): + """get image_data""" + return self.img_data + + def _create_thread(self): + """Start the background video thread if it isn't running yet.""" + if self.thread is not None and self.thread.isAlive(): + return + + # start 
background frame thread + self.thread = threading.Thread(target=self._video_thread) + self.thread.start() + + def get_frame(self): + """Return the current video frame.""" + # wait util receive a frame data, and push it to your browser. + ret = self.web_event.wait() + self.web_event.clear() + # True: _web_event return because set() + # False: _web_event return because timeout + if ret: + return (self._frame, self.fps, self.width, self.height, self.rectangle_list) + + return (None, None, None, None, None) + + def frames(self): + """a generator generates image""" + while True: + self.image_event.wait() + self.image_event.clear() + if self.img_data: + yield self.img_data + self.img_data = None + + # if set _close_thread_switch, return immediately + if self.close_thread_switch: + yield None + + # if no frames or heartbeat coming in the last 100 seconds, + # stop the thread and close socket + if time.time() - self.heartbeat > HEARTBEAT_TIMEOUT: + self.set_thread_switch() + self.img_data = None + yield None + + def _video_thread(self): + """background thread to process video""" + logging.info('create %s...', (self.thread_name)) + for frame in self.frames(): + if frame: + # send signal to clients + self._frame = frame + self.web_event.set() + + # exit thread + if self.close_thread_switch: + self.channel_manager.clean_channel_resource_by_name( + self.channel_name) + logging.info('Stop thread:%s.', (self.thread_name)) + break diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/channel_manager.py b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/channel_manager.py new file mode 100644 index 0000000000000000000000000000000000000000..04d857889f26cb8620d17f7c91e091d0b769cd3b --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/channel_manager.py @@ 
-0,0 +1,291 @@ +# ======================================================================= +# +# Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1 Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2 Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3 Neither the names of the copyright holders nor the names of the +# contributors may be used to endorse or promote products derived from this +# software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# ======================================================================= +# +"""presenter channel manager module""" + +import logging +import threading + +# max support 10 channels +MAX_CHANNEL_NUM = 10 + +# when a channel have receive data, +# the active status will last 3 seconds +ACTIVE_LAST_TIME = 3 + +class ChannelResource(): + """every channel has a ChannelResource object, contains a ChannelHandler object + and a socket fileno. it corresponding to the ChannelFd one by one + """ + def __init__(self, handler, socket=None): + self.handler = handler + self.socket = socket + +class ChannelFd(): + """every channel has a ChannelFd object, contains a ChannelHandler + object and channel name. It corresponds to the ChannelResource one by one + """ + def __init__(self, channel_name, handler): + self.channel_name = channel_name + self.handler = handler + +class Channel(): + """record user register channels + self.image: if channel type is image, save the image here + """ + def __init__(self, channel_name): + self.channel_name = channel_name + self.image = None + self.rectangle_list = None + +class ChannelManager(): + """manage all the api about channel + __instance: ensure it is a single instance + _channel_resources: a dict + key: channel name + value: a ChannelResource() object. + _channel_fds: a dict + key: socket fileno + value: a ChannelFd() object. + _channel_list: a list, member is a Channel() object.""" + + __instance = None + channel_resources = {} + channel_fds = {} + channel_list = [] + channel_resource_lock = threading.Lock() + channel_fds_lock = threading.Lock() + channel_lock = threading.Lock() + err_code_ok = 0 + err_code_too_many_channel = 1 + err_code_repeat_channel = 2 + + def __init__(self, channel_list=None): + """init func""" + + def __new__(cls, channel_list=None): + """ensure only a single instance created. 
""" + if cls.__instance is None: + cls.__instance = object.__new__(cls) + # default create 2 channels: image and video + # if channel_list is not None and isinstance(channel_list, list): + # for i in channel_list: + # cls.channel_list.append(Channel(channel_name=i)) + # logging.info("register channel %s", i) + return cls.__instance + + def _register_channel_fd(self, sock_fileno, channel_name): + """Internal func, create a ChannelFd object""" + if self.channel_fds.get(sock_fileno): + del self.channel_fds[sock_fileno] + handler = self.channel_resources[channel_name].handler + self.channel_fds[sock_fileno] = ChannelFd(channel_name, handler) + + + def create_channel_resource(self, channel_name, + channel_fd, + media_type, + handler): + """create a ChannelResource object which contains all the resources + binding a channel. + channel_name: channel name. + channel_fd: socket fileno binding the channel. + media_type: support image or video. + handler: an channel handler process image data. + """ + with self.channel_resource_lock: + log_info = "create channel resource," + log_info += " channel_name:%s, channel_fd:%u, media_type:%s" + logging.info(log_info, channel_name, channel_fd, media_type) + self.channel_resources[channel_name] = \ + ChannelResource(handler=handler, socket=channel_fd) + self._register_channel_fd(channel_fd, channel_name) + + def _clean_channel_resource(self, channel_name): + """Internal func, clean channel resource by channel name""" + if self.channel_resources.get(channel_name): + self.channel_resources[channel_name].handler.close_thread() + self.channel_resources[channel_name].handler.web_event.set() + self.channel_resources[channel_name].handler.image_event.set() + del self.channel_resources[channel_name] + logging.info("clean channel: %s's resource", channel_name) + + def clean_channel_resource_by_fd(self, sock_fileno): + """ + clean channel resource by socket fileno + sock_fileno: socket fileno which binding to an channel + """ + with 
self.channel_fds_lock: + with self.channel_resource_lock: + if self.channel_fds.get(sock_fileno): + self._clean_channel_resource( + self.channel_fds[sock_fileno].channel_name) + del self.channel_fds[sock_fileno] + + def clean_channel_resource_by_name(self, channel_name): + """clean channel resource by channel_name + channel_name: channel name""" + if self.channel_resources.get(channel_name): + self.clean_channel_resource_by_fd( + self.channel_resources[channel_name].socket) + + def get_channel_handler_by_fd(self, sock_fileno): + """get channel handler by socket fileno""" + with self.channel_fds_lock: + if self.channel_fds.get(sock_fileno): + return self.channel_fds[sock_fileno].handler + return None + + def is_channel_busy(self, channel_name): + """check if channel is busy """ + with self.channel_resource_lock: + if self.channel_resources.get(channel_name): + return True + return False + + def close_all_thread(self): + """if a channel process video type, it will create a thread. + this func can close the thread. 
+ """ + with self.channel_resource_lock: + for channel_name in self.channel_resources: + self.channel_resources[channel_name].handler.close_thread() + + def get_channel_handler_by_name(self, channel_name): + """ + get the channel handlerby channel name + """ + with self.channel_resource_lock: + if self.channel_resources.get(channel_name): + return self.channel_resources[channel_name].handler + return None + + def list_channels(self): + """ + return all the channel name and the status + status is indicating active state or not + """ + with self.channel_lock: + return [{'status': self.is_channel_busy(i.channel_name), + 'name': i.channel_name} for i in self.channel_list] + + def register_one_channel(self, channel_name): + """ + register a channel path, user create a channel via browser + """ + with self.channel_lock: + if len(self.channel_list) >= MAX_CHANNEL_NUM: + logging.info("register channel: %s fail, \ + exceed max number 10.", channel_name) + return self.err_code_too_many_channel + for i in range(len(self.channel_list)): + if self.channel_list[i].channel_name == channel_name: + logging.info("register channel: %s fail, \ + already exist.", channel_name) + return self.err_code_repeat_channel + + self.channel_list.append(Channel(channel_name=channel_name)) + logging.info("register channel: %s", channel_name) + return self.err_code_ok + + def unregister_one_channel(self, channel_name): + """ + unregister a channel path, user delete a channel via browser + """ + with self.channel_lock: + for i in range(len(self.channel_list)): + if self.channel_list[i].channel_name == channel_name: + self.clean_channel_resource_by_name(channel_name) + logging.info("unregister channel: %s", channel_name) + del self.channel_list[i] + break + + def is_channel_exist(self, channel_name): + """ + Check if a channel is exist + True: exist + False: not exist + """ + with self.channel_lock: + for i in range(len(self.channel_list)): + if self.channel_list[i].channel_name == channel_name: + 
return True + return False + + def save_channel_image(self, channel_name, image_data, rectangle_list): + """ + when a channel bounding to image type, + server will permanent hold an image for it. + this func save a image in memory + """ + with self.channel_lock: + for i in range(len(self.channel_list)): + if self.channel_list[i].channel_name == channel_name: + self.channel_list[i].image = image_data + self.channel_list[i].rectangle_list = rectangle_list + break + + def get_channel_image(self, channel_name): + """ + when a channel bounding to image type, + server will permanent hold an image for it. + this func get the image + """ + with self.channel_lock: + for i in range(len(self.channel_list)): + if self.channel_list[i].channel_name == channel_name: + return self.channel_list[i].image + + # channel not exist + return None + + def get_channel_image_with_rectangle(self, channel_name): + """ + A new method for display server, + return the image and rectangle list + """ + with self.channel_lock: + for i in range(len(self.channel_list)): + if self.channel_list[i].channel_name == channel_name: + return (self.channel_list[i].image, self.channel_list[i].rectangle_list) + return (None, None) + + def clean_channel_image(self, channel_name): + """ + when a channel bounding to image type, + server will permanent hold an image for it. 
+ this func clean the image + """ + with self.channel_lock: + for i in range(len(self.channel_list)): + if self.channel_list[i].channel_name == channel_name: + self.channel_list[i].image = None + break diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/parameter_validation.py b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/parameter_validation.py new file mode 100644 index 0000000000000000000000000000000000000000..4402b1b32126c4af29a0247b742bd3e98609c20c --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/parameter_validation.py @@ -0,0 +1,98 @@ +# ======================================================================= +# +# Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1 Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2 Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3 Neither the names of the copyright holders nor the names of the +# contributors may be used to endorse or promote products derived from this +# software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# ======================================================================= +# +"""Parameter Validation module""" +import logging + +PORT_INTERVAL_BEGIN = 1024 +PORT_INTERVAL_END = 49151 + +def validate_ip(ip_str): + if ip_str == '0.0.0.0': + logging.error("IP Addr \"0.0.0.0\" is illegal") + print("IP Addr \"0.0.0.0\" is illegal") + return False + + sep = ip_str.split('.') + if len(sep) != 4: + return False + for i, x in enumerate(sep): + try: + int_x = int(x) + if int_x < 0 or int_x > 255: + logging.error("Illegal ip: %s", ip_str) + print("Illegal ip: %s"%ip_str) + return False + except ValueError: + logging.error("IP format error:%s", ip_str) + print("IP format error:%s"%ip_str) + return False + return True + +def validate_port(value_str): + try: + value = int(value_str) + if value < PORT_INTERVAL_BEGIN or value > PORT_INTERVAL_END: + logging.error("Illegal port: %d", value) + print("Illegal port: %d"%value) + return False + except ValueError: + logging.error("Port format error:%s", value_str) + print("Port format error:%s"%value_str) + return False + return True + +def validate_integer(value_str, begin, end): + try: + value = int(value_str) + if value < begin or value > end: + return False + except ValueError: + return False + return True + +def Integer_greater(value_str, compared_value): + try: + value = int(value_str) + if value < compared_value: + return False + except ValueError: + return False + return True + +def 
validate_float(value_str, begin, end): + try: + value = float(value_str) + if value < begin or value > end: + return False + except ValueError: + return False + return True \ No newline at end of file diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/presenter_message_pb2.py b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/presenter_message_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..8e171b5cc50f34fd1eff5ad0ee5f57b648640965 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/presenter_message_pb2.py @@ -0,0 +1,525 @@ +# ======================================================================= +# +# Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1 Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2 Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3 Neither the names of the copyright holders nor the names of the +# contributors may be used to endorse or promote products derived from this +# software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# ======================================================================= +# + +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: presenter_message.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='presenter_message.proto', + package='ascend.presenter.proto', + syntax='proto3', + serialized_pb=_b('\n\x17presenter_message.proto\x12\x16\x61scend.presenter.proto\"l\n\x12OpenChannelRequest\x12\x14\n\x0c\x63hannel_name\x18\x01 \x01(\t\x12@\n\x0c\x63ontent_type\x18\x02 \x01(\x0e\x32*.ascend.presenter.proto.ChannelContentType\"n\n\x13OpenChannelResponse\x12@\n\nerror_code\x18\x01 \x01(\x0e\x32,.ascend.presenter.proto.OpenChannelErrorCode\x12\x15\n\rerror_message\x18\x02 \x01(\t\"\x12\n\x10HeartbeatMessage\"\"\n\nCoordinate\x12\t\n\x01x\x18\x01 \x01(\r\x12\t\n\x01y\x18\x02 \x01(\r\"\x94\x01\n\x0eRectangle_Attr\x12\x34\n\x08left_top\x18\x01 \x01(\x0b\x32\".ascend.presenter.proto.Coordinate\x12\x38\n\x0cright_bottom\x18\x02 
\x01(\x0b\x32\".ascend.presenter.proto.Coordinate\x12\x12\n\nlabel_text\x18\x03 \x01(\t\"\xb7\x01\n\x13PresentImageRequest\x12\x33\n\x06\x66ormat\x18\x01 \x01(\x0e\x32#.ascend.presenter.proto.ImageFormat\x12\r\n\x05width\x18\x02 \x01(\r\x12\x0e\n\x06height\x18\x03 \x01(\r\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\x0c\x12>\n\x0erectangle_list\x18\x05 \x03(\x0b\x32&.ascend.presenter.proto.Rectangle_Attr\"o\n\x14PresentImageResponse\x12@\n\nerror_code\x18\x01 \x01(\x0e\x32,.ascend.presenter.proto.PresentDataErrorCode\x12\x15\n\rerror_message\x18\x02 \x01(\t*\xa5\x01\n\x14OpenChannelErrorCode\x12\x19\n\x15kOpenChannelErrorNone\x10\x00\x12\"\n\x1ekOpenChannelErrorNoSuchChannel\x10\x01\x12)\n%kOpenChannelErrorChannelAlreadyOpened\x10\x02\x12#\n\x16kOpenChannelErrorOther\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01*P\n\x12\x43hannelContentType\x12\x1c\n\x18kChannelContentTypeImage\x10\x00\x12\x1c\n\x18kChannelContentTypeVideo\x10\x01*#\n\x0bImageFormat\x12\x14\n\x10kImageFormatJpeg\x10\x00*\xa4\x01\n\x14PresentDataErrorCode\x12\x19\n\x15kPresentDataErrorNone\x10\x00\x12$\n kPresentDataErrorUnsupportedType\x10\x01\x12&\n\"kPresentDataErrorUnsupportedFormat\x10\x02\x12#\n\x16kPresentDataErrorOther\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x62\x06proto3') +) + +_OPENCHANNELERRORCODE = _descriptor.EnumDescriptor( + name='OpenChannelErrorCode', + full_name='ascend.presenter.proto.OpenChannelErrorCode', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='kOpenChannelErrorNone', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='kOpenChannelErrorNoSuchChannel', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='kOpenChannelErrorChannelAlreadyOpened', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='kOpenChannelErrorOther', index=3, number=-1, + options=None, + type=None), + ], + containing_type=None, + options=None, + 
serialized_start=780, + serialized_end=945, +) +_sym_db.RegisterEnumDescriptor(_OPENCHANNELERRORCODE) + +OpenChannelErrorCode = enum_type_wrapper.EnumTypeWrapper(_OPENCHANNELERRORCODE) +_CHANNELCONTENTTYPE = _descriptor.EnumDescriptor( + name='ChannelContentType', + full_name='ascend.presenter.proto.ChannelContentType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='kChannelContentTypeImage', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='kChannelContentTypeVideo', index=1, number=1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=947, + serialized_end=1027, +) +_sym_db.RegisterEnumDescriptor(_CHANNELCONTENTTYPE) + +ChannelContentType = enum_type_wrapper.EnumTypeWrapper(_CHANNELCONTENTTYPE) +_IMAGEFORMAT = _descriptor.EnumDescriptor( + name='ImageFormat', + full_name='ascend.presenter.proto.ImageFormat', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='kImageFormatJpeg', index=0, number=0, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1029, + serialized_end=1064, +) +_sym_db.RegisterEnumDescriptor(_IMAGEFORMAT) + +ImageFormat = enum_type_wrapper.EnumTypeWrapper(_IMAGEFORMAT) +_PRESENTDATAERRORCODE = _descriptor.EnumDescriptor( + name='PresentDataErrorCode', + full_name='ascend.presenter.proto.PresentDataErrorCode', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='kPresentDataErrorNone', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='kPresentDataErrorUnsupportedType', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='kPresentDataErrorUnsupportedFormat', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='kPresentDataErrorOther', index=3, number=-1, + options=None, + type=None), + ], 
+ containing_type=None, + options=None, + serialized_start=1067, + serialized_end=1231, +) +_sym_db.RegisterEnumDescriptor(_PRESENTDATAERRORCODE) + +PresentDataErrorCode = enum_type_wrapper.EnumTypeWrapper(_PRESENTDATAERRORCODE) +kOpenChannelErrorNone = 0 +kOpenChannelErrorNoSuchChannel = 1 +kOpenChannelErrorChannelAlreadyOpened = 2 +kOpenChannelErrorOther = -1 +kChannelContentTypeImage = 0 +kChannelContentTypeVideo = 1 +kImageFormatJpeg = 0 +kPresentDataErrorNone = 0 +kPresentDataErrorUnsupportedType = 1 +kPresentDataErrorUnsupportedFormat = 2 +kPresentDataErrorOther = -1 + + + +_OPENCHANNELREQUEST = _descriptor.Descriptor( + name='OpenChannelRequest', + full_name='ascend.presenter.proto.OpenChannelRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='channel_name', full_name='ascend.presenter.proto.OpenChannelRequest.channel_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='content_type', full_name='ascend.presenter.proto.OpenChannelRequest.content_type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=51, + serialized_end=159, +) + + +_OPENCHANNELRESPONSE = _descriptor.Descriptor( + name='OpenChannelResponse', + full_name='ascend.presenter.proto.OpenChannelResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='error_code', 
full_name='ascend.presenter.proto.OpenChannelResponse.error_code', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='error_message', full_name='ascend.presenter.proto.OpenChannelResponse.error_message', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=161, + serialized_end=271, +) + + +_HEARTBEATMESSAGE = _descriptor.Descriptor( + name='HeartbeatMessage', + full_name='ascend.presenter.proto.HeartbeatMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=273, + serialized_end=291, +) + + +_COORDINATE = _descriptor.Descriptor( + name='Coordinate', + full_name='ascend.presenter.proto.Coordinate', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='x', full_name='ascend.presenter.proto.Coordinate.x', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='y', full_name='ascend.presenter.proto.Coordinate.y', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=293, + serialized_end=327, +) + + +_RECTANGLE_ATTR = _descriptor.Descriptor( + name='Rectangle_Attr', + full_name='ascend.presenter.proto.Rectangle_Attr', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='left_top', full_name='ascend.presenter.proto.Rectangle_Attr.left_top', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='right_bottom', full_name='ascend.presenter.proto.Rectangle_Attr.right_bottom', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='label_text', full_name='ascend.presenter.proto.Rectangle_Attr.label_text', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=330, + serialized_end=478, +) + + +_PRESENTIMAGEREQUEST = _descriptor.Descriptor( + name='PresentImageRequest', + full_name='ascend.presenter.proto.PresentImageRequest', + filename=None, + file=DESCRIPTOR, + 
containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='format', full_name='ascend.presenter.proto.PresentImageRequest.format', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='width', full_name='ascend.presenter.proto.PresentImageRequest.width', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='height', full_name='ascend.presenter.proto.PresentImageRequest.height', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='data', full_name='ascend.presenter.proto.PresentImageRequest.data', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='rectangle_list', full_name='ascend.presenter.proto.PresentImageRequest.rectangle_list', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=481, + serialized_end=664, +) + + 
+_PRESENTIMAGERESPONSE = _descriptor.Descriptor( + name='PresentImageResponse', + full_name='ascend.presenter.proto.PresentImageResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='error_code', full_name='ascend.presenter.proto.PresentImageResponse.error_code', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='error_message', full_name='ascend.presenter.proto.PresentImageResponse.error_message', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=666, + serialized_end=777, +) + +_OPENCHANNELREQUEST.fields_by_name['content_type'].enum_type = _CHANNELCONTENTTYPE +_OPENCHANNELRESPONSE.fields_by_name['error_code'].enum_type = _OPENCHANNELERRORCODE +_RECTANGLE_ATTR.fields_by_name['left_top'].message_type = _COORDINATE +_RECTANGLE_ATTR.fields_by_name['right_bottom'].message_type = _COORDINATE +_PRESENTIMAGEREQUEST.fields_by_name['format'].enum_type = _IMAGEFORMAT +_PRESENTIMAGEREQUEST.fields_by_name['rectangle_list'].message_type = _RECTANGLE_ATTR +_PRESENTIMAGERESPONSE.fields_by_name['error_code'].enum_type = _PRESENTDATAERRORCODE +DESCRIPTOR.message_types_by_name['OpenChannelRequest'] = _OPENCHANNELREQUEST +DESCRIPTOR.message_types_by_name['OpenChannelResponse'] = _OPENCHANNELRESPONSE +DESCRIPTOR.message_types_by_name['HeartbeatMessage'] = _HEARTBEATMESSAGE +DESCRIPTOR.message_types_by_name['Coordinate'] = 
_COORDINATE +DESCRIPTOR.message_types_by_name['Rectangle_Attr'] = _RECTANGLE_ATTR +DESCRIPTOR.message_types_by_name['PresentImageRequest'] = _PRESENTIMAGEREQUEST +DESCRIPTOR.message_types_by_name['PresentImageResponse'] = _PRESENTIMAGERESPONSE +DESCRIPTOR.enum_types_by_name['OpenChannelErrorCode'] = _OPENCHANNELERRORCODE +DESCRIPTOR.enum_types_by_name['ChannelContentType'] = _CHANNELCONTENTTYPE +DESCRIPTOR.enum_types_by_name['ImageFormat'] = _IMAGEFORMAT +DESCRIPTOR.enum_types_by_name['PresentDataErrorCode'] = _PRESENTDATAERRORCODE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +OpenChannelRequest = _reflection.GeneratedProtocolMessageType('OpenChannelRequest', (_message.Message,), dict( + DESCRIPTOR = _OPENCHANNELREQUEST, + __module__ = 'presenter_message_pb2' + # @@protoc_insertion_point(class_scope:ascend.presenter.proto.OpenChannelRequest) + )) +_sym_db.RegisterMessage(OpenChannelRequest) + +OpenChannelResponse = _reflection.GeneratedProtocolMessageType('OpenChannelResponse', (_message.Message,), dict( + DESCRIPTOR = _OPENCHANNELRESPONSE, + __module__ = 'presenter_message_pb2' + # @@protoc_insertion_point(class_scope:ascend.presenter.proto.OpenChannelResponse) + )) +_sym_db.RegisterMessage(OpenChannelResponse) + +HeartbeatMessage = _reflection.GeneratedProtocolMessageType('HeartbeatMessage', (_message.Message,), dict( + DESCRIPTOR = _HEARTBEATMESSAGE, + __module__ = 'presenter_message_pb2' + # @@protoc_insertion_point(class_scope:ascend.presenter.proto.HeartbeatMessage) + )) +_sym_db.RegisterMessage(HeartbeatMessage) + +Coordinate = _reflection.GeneratedProtocolMessageType('Coordinate', (_message.Message,), dict( + DESCRIPTOR = _COORDINATE, + __module__ = 'presenter_message_pb2' + # @@protoc_insertion_point(class_scope:ascend.presenter.proto.Coordinate) + )) +_sym_db.RegisterMessage(Coordinate) + +Rectangle_Attr = _reflection.GeneratedProtocolMessageType('Rectangle_Attr', (_message.Message,), dict( + DESCRIPTOR = _RECTANGLE_ATTR, + __module__ = 
'presenter_message_pb2' + # @@protoc_insertion_point(class_scope:ascend.presenter.proto.Rectangle_Attr) + )) +_sym_db.RegisterMessage(Rectangle_Attr) + +PresentImageRequest = _reflection.GeneratedProtocolMessageType('PresentImageRequest', (_message.Message,), dict( + DESCRIPTOR = _PRESENTIMAGEREQUEST, + __module__ = 'presenter_message_pb2' + # @@protoc_insertion_point(class_scope:ascend.presenter.proto.PresentImageRequest) + )) +_sym_db.RegisterMessage(PresentImageRequest) + +PresentImageResponse = _reflection.GeneratedProtocolMessageType('PresentImageResponse', (_message.Message,), dict( + DESCRIPTOR = _PRESENTIMAGERESPONSE, + __module__ = 'presenter_message_pb2' + # @@protoc_insertion_point(class_scope:ascend.presenter.proto.PresentImageResponse) + )) +_sym_db.RegisterMessage(PresentImageResponse) + + +# @@protoc_insertion_point(module_scope) diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/presenter_socket_server.py b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/presenter_socket_server.py new file mode 100644 index 0000000000000000000000000000000000000000..a28dd670fdd81bc86bd6f6a95beadb3d1a201409 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/common/presenter_socket_server.py @@ -0,0 +1,463 @@ +# ======================================================================= +# +# Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1 Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. 
+# +# 2 Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3 Neither the names of the copyright holders nor the names of the +# contributors may be used to endorse or promote products derived from this +# software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# ======================================================================= +# +"""presenter socket server module""" + +import threading +import select +import struct +import logging +import socket +from google.protobuf.message import DecodeError +import common.presenter_message_pb2 as pb2 +from common.channel_manager import ChannelManager +from common.channel_handler import ChannelHandler + +#read nothing from socket.recv() +SOCK_RECV_NULL = b'' + +# epool will return if no event coming in 1 s +EPOLL_TIMEOUT = 1 + +# it specifies the number of unaccepted connections that +# the system will allow before refusing new connections. 
+SOCKET_WAIT_QUEUE = 2 + +# message head length, include 4 bytes message total length +# and 1 byte message name length +MSG_HEAD_LENGTH = 5 + + +class PresenterSocketServer(): + """a socket server communication with presenter agent. + + """ + def __init__(self, server_address): + """ + Args: + server_address: server listen address, + include an ipv4 address and a port. + """ + + # thread exit switch, if set true, thread must exit immediately. + self.thread_exit_switch = False + # message head length, include 4 bytes message total length + # and 1 byte message name length + self.msg_head_len = 5 + self._create_socket_server(server_address) + + def _create_socket_server(self, server_address): + """ + create a socket server + Args: + server_address: server listen address, + include an ipv4 address and a port. + """ + + # Create a socket server. + self._sock_server = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self._sock_server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + self._sock_server.bind(server_address) + self._sock_server.listen(SOCKET_WAIT_QUEUE) + self._sock_server.setblocking(False) + + # Get server host name and port + host, port = self._sock_server.getsockname()[:2] + + # Start presenter socket server thread. + threading.Thread(target=self._server_listen_thread).start() + + # Display directly on the screen + print('Presenter socket server listen on %s:%s\n' % (host, port)) + + def set_exit_switch(self): + """set switch True to stop presenter socket server thread.""" + self.thread_exit_switch = True + + def _read_socket(self, conn, read_len): + ''' + Read fixed length data + Args: + conn: a socket connection + read_len: read fix byte. + Returns: + ret: True or False + buf: read fix byte buf. 
+ ''' + has_read_len = 0 + read_buf = SOCK_RECV_NULL + total_buf = SOCK_RECV_NULL + while has_read_len != read_len: + try: + read_buf = conn.recv(read_len - has_read_len) + except socket.error: + logging.error("socket %u exception:socket.error", conn.fileno()) + return False, None + if read_buf == SOCK_RECV_NULL: + return False, None + total_buf += read_buf + has_read_len = len(total_buf) + + return True, total_buf + + def _read_msg_head(self, sock_fileno, conns): + ''' + Args: + sock_fileno: a socket fileno + conns: all socket connections which created by server. + Returns: + msg_total_len: total message length. + msg_name_len: message name length. + ''' + ret, msg_head = self._read_socket(conns[sock_fileno], self.msg_head_len) + if not ret: + logging.error("socket %u receive msg head null", sock_fileno) + return None, None + + # in Struct(), 'I' is unsigned int, 'B' is unsigned char + msg_head_data = struct.Struct('IB') + (msg_total_len, msg_name_len) = msg_head_data.unpack(msg_head) + msg_total_len = socket.ntohl(msg_total_len) + + return msg_total_len, msg_name_len + + def _read_msg_name(self, sock_fd, conns, msg_name_len): + ''' + Args: + sock_fd: a socket fileno + conns: all socket connections which created by server. + msg_name_len: message name length. + Returns: + ret: True or False + msg_name: message name. + ''' + ret, msg_name = self._read_socket(conns[sock_fd], msg_name_len) + if not ret: + logging.error("socket %u receive msg name null", sock_fd) + return False, None + try: + msg_name = msg_name.decode("utf-8") + except UnicodeDecodeError: + logging.error("msg name decode to utf-8 error") + return False, None + + return True, msg_name + + def _read_msg_body(self, sock_fd, conns, msg_body_len, msgs): + ''' + Args: + sock_fd: a socket fileno + conns: all socket connections which created by server. + msg_name_len: message name length. 
+ msgs: msg read from a socket + Returns: + ret: True or False + ''' + ret, msg_body = self._read_socket(conns[sock_fd], msg_body_len) + if not ret: + logging.error("socket %u receive msg body null", sock_fd) + return False + msgs[sock_fd] = msg_body + return True + + def _read_sock_and_process_msg(self, sock_fileno, conns, msgs): + ''' + Args: + sock_fileno: a socket fileno, return value of socket.fileno() + conns: all socket connections registered in epoll + msgs: msg read from a socket + Returns: + ret: True or False + ''' + + # Step1: read msg head + msg_total_len, msg_name_len = self._read_msg_head(sock_fileno, conns) + if msg_total_len is None: + logging.error("msg_total_len is None.") + return False + + # Step2: read msg name + ret, msg_name = self._read_msg_name(sock_fileno, conns, msg_name_len) + if not ret: + return ret + + # Step3: read msg body + msg_body_len = msg_total_len - self.msg_head_len - msg_name_len + if msg_body_len < 0: + logging.error("msg_total_len:%u, msg_name_len:%u, msg_body_len:%u", + msg_total_len, msg_name_len, msg_body_len) + return False + ret = self._read_msg_body(sock_fileno, conns, msg_body_len, msgs) + if not ret: + return ret + + # Step4: process msg + ret = self._process_msg(conns[sock_fileno], msg_name, msgs[sock_fileno]) + return ret + + def _process_epollin(self, sock_fileno, epoll, conns, msgs): + ''' + Args: + sock_fileno: a socket fileno, return value of socket.fileno() + epoll: a set of select.epoll. + conns: all socket connections registered in epoll + msgs: msg read from a socket + ''' + msgs[sock_fileno] = b'' + try: + ret = self._read_sock_and_process_msg(sock_fileno, conns, msgs) + if not ret: + self._clean_connect(sock_fileno, epoll, conns, msgs) + except socket.error: + logging.error("receive socket error.") + self._clean_connect(sock_fileno, epoll, conns, msgs) + + def _accept_new_socket(self, epoll, conns): + ''' + Args: + epoll: a set of select.epoll. 
+ conns: all socket connections registered in epoll + ''' + try: + new_conn, address = self._sock_server.accept() + new_conn.setblocking(True) + epoll.register(new_conn.fileno(), select.EPOLLIN | select.EPOLLHUP) + conns[new_conn.fileno()] = new_conn + logging.info("create new connection:client-ip:%s, client-port:%s, fd:%s", + address[0], address[1], new_conn.fileno()) + except socket.error: + logging.error("socket.error exception when sock.accept()") + + def _server_listen_thread(self): + """socket server thread, epoll listening all the socket events""" + epoll = select.epoll() + epoll.register(self._sock_server.fileno(), select.EPOLLIN | select.EPOLLHUP) + try: + conns = {} + msgs = {} + while True: + # thread must exit immediately + if self.thread_exit_switch: + break + + events = epoll.poll(EPOLL_TIMEOUT) + # timeout, but no event come, continue waiting + if not events: + continue + + for sock_fileno, event in events: + # new connection request from presenter agent + if self._sock_server.fileno() == sock_fileno: + self._accept_new_socket(epoll, conns) + + # remote connection closed + # it means presenter agent exit withot close socket. + elif event & select.EPOLLHUP: + logging.info("receive event EPOLLHUP") + self._clean_connect(sock_fileno, epoll, conns, msgs) + # new data coming in a socket connection + elif event & select.EPOLLIN: + self._process_epollin(sock_fileno, epoll, conns, msgs) + # receive event not recognize + else: + logging.error("not recognize event %f", event) + self._clean_connect(sock_fileno, epoll, conns, msgs) + + finally: + logging.info("conns:%s", conns) + logging.info("presenter server listen thread exit.") + epoll.unregister(self._sock_server.fileno()) + epoll.close() + self._sock_server.close() + + + def _process_heartbeat(self, conn): + ''' + set heartbeat + Args: + conn: a socket connection + Returns: + True: set heartbeat ok. 
+ + ''' + sock_fileno = conn.fileno() + handler = self.channel_manager.get_channel_handler_by_fd(sock_fileno) + if handler is not None: + handler.set_heartbeat() + + return True + + def _process_open_channel(self, conn, msg_data): + """ + Deserialization protobuf and process open_channel request + Args: + conn: a socket connection + msg_data: a protobuf struct, include open channel request. + + Returns: + + protobuf structure like this: + ---------------------------------------------- + |channel_name | string | + |---------------------------------------------- + |content_type | ChannelContentType | + |---------------------------------------------- + + enum ChannelContentType { + kChannelContentTypeImage = 0; + kChannelContentTypeVideo = 1; + } + """ + request = pb2.OpenChannelRequest() + response = pb2.OpenChannelResponse() + + try: + request.ParseFromString(msg_data) + except DecodeError: + logging.error("ParseFromString exception: Error parsing message") + channel_name = "unknown channel" + return self._response_open_channel(conn, channel_name, response, + pb2.kOpenChannelErrorOther) + + channel_name = request.channel_name + + # check channel name if exist + if not self.channel_manager.is_channel_exist(channel_name): + logging.error("rrr channel name %s is not exist.", channel_name) + # if channel is not exist, need to create the channel + ret = self.channel_manager.register_one_channel(channel_name) + if ret != ChannelManager.err_code_ok: + logging.error("Create the channel %s failed!, and ret is %d", channel_name, ret) + err_code = pb2.kOpenChannelErrorOther + self._response_open_channel(conn, channel_name, response, err_code) + + # check channel path if busy + if self.channel_manager.is_channel_busy(channel_name): + logging.error("channel path %s is busy.", channel_name) + err_code = pb2.kOpenChannelErrorChannelAlreadyOpened + return self._response_open_channel(conn, channel_name, response, + err_code) + + # if channel type is image, need clean image if exist 
+ self.channel_manager.clean_channel_image(channel_name) + + if request.content_type == pb2.kChannelContentTypeImage: + media_type = "image" + elif request.content_type == pb2.kChannelContentTypeVideo: + media_type = "video" + else: + logging.error("media type %s is not recognized.", + request.content_type) + return self._response_open_channel(conn, channel_name, response, + pb2.kOpenChannelErrorOther) + + handler = ChannelHandler(channel_name, media_type) + self.channel_manager.create_channel_resource( + channel_name, conn.fileno(), media_type, handler) + + return self._response_open_channel(conn, channel_name, response, + pb2.kOpenChannelErrorNone) + + def _response_open_channel(self, conn, channel_name, response, err_code): + """ + Assemble protobuf to response open_channel request + Args: + conn: a socket connection + channel_name: name of a channel. + response: a protobuf response to presenter agent + err_code: part of the response + + Returns: + ret_code:True or False + + Message structure like this: + -------------------------------------------------------------------- + |total message len | int | 4 bytes | + |------------------------------------------------------------------- + |message name len | byte | 1 byte | + |------------------------------------------------------------------- + |message name | string | xx bytes | + |------------------------------------------------------------------- + |message body | protobuf | xx bytes | + -------------------------------------------------------------------- + + protobuf structure like this: + -------------------------------------------------------------------- + |error_code | enum | OpenChannelErrorCode | + |------------------------------------------------------------------- + |error_message | string | xx bytes | + |------------------------------------------------------------------- + + enum OpenChannelErrorCode { + kOpenChannelErrorNone = 0; + kOpenChannelErrorNoSuchChannel = 1; + 
kOpenChannelErrorChannelAlreadyOpened = 2; + kOpenChannelErrorOther = -1; + } + """ + response.error_code = err_code + ret_code = False + if err_code == pb2.kOpenChannelErrorNoSuchChannel: + response.error_message = "channel {} not exist." \ + .format(channel_name) + elif err_code == pb2.kOpenChannelErrorChannelAlreadyOpened: + response.error_message = "channel {} is busy.".format(channel_name) + elif err_code == pb2.kOpenChannelErrorNone: + response.error_message = "open channel succeed" + ret_code = True + else: + response.error_message = "Unknown err open channel {}." \ + .format(channel_name) + + self.send_message(conn, response, pb2._OPENCHANNELRESPONSE.full_name) + return ret_code + + def send_message(self, conn, protobuf, msg_name): + ''' + API for send message + Args: + conn: a socket connection. + protobuf: message body defined in protobuf. + msg_name: msg name. + Returns: NA + ''' + message_data = protobuf.SerializeToString() + message_len = len(message_data) + + msg_name_size = len(msg_name) + msg_total_size = self.msg_head_len + msg_name_size + message_len + # in Struct(), 'I' is unsigned int, 'B' is unsigned char + s = struct.Struct('IB') + msg_head = (socket.htonl(msg_total_size), msg_name_size) + packed_msg_head = s.pack(*msg_head) + msg_data = packed_msg_head + \ + bytes(msg_name, encoding="utf-8") + message_data + conn.sendall(msg_data) diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/config/config.conf b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/config/config.conf new file mode 100644 index 0000000000000000000000000000000000000000..008784cb30ca0307d034738b09858b3e837fd806 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/config/config.conf @@ -0,0 +1,20 @@ +[baseconf] +# 
A socket server address to communicate with presenter agent +# Please ensure that the port does not conflict, only support Ipv4 +presenter_server_ip=192.168.1.223 +presenter_server_port=7008 + +# A http server address, you can visit the website by "http//web_server_ip:web_server_port". +# Only support Chrome now. +web_server_ip=192.168.1.223 +web_server_port=7009 + +# Storage path of Face registration information +# Please ensure the storage_dir exist before running the program +storage_dir=./facial + +# Maximum number of supported faces, range is 1-100 +max_face_num=100 + +# Face matching threshold, range is 0 - 1 +face_match_threshold = 0.5 diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/config/logging.conf b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/config/logging.conf new file mode 100644 index 0000000000000000000000000000000000000000..554168c7df30a50b70b3e9f93de9b94a405b44e2 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/config/logging.conf @@ -0,0 +1,27 @@ +[loggers] +keys=root,facial_recognition + +[handlers] +keys=rotatingFileHandler + +[formatters] +keys=simpleFmt + +[logger_root] +level=DEBUG +handlers=rotatingFileHandler + +[logger_facial_recognition] +level=DEBUG +handlers=rotatingFileHandler +qualname=facial_recognition +propagate=0 + +[handler_rotatingFileHandler] +class=handlers.RotatingFileHandler +level=DEBUG +formatter=simpleFmt +args=("facial_recognition.log", "a", 10*1024*1024, 2) + +[formatter_simpleFmt] +format=%(asctime)s-%(levelname)s-%(filename)s:%(lineno)s %(message)s diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__init__.py 
b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__pycache__/__init__.cpython-36.pyc b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fe988b3c2030d351b6fe4590dd2344047da795c4 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__pycache__/__init__.cpython-36.pyc differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__pycache__/config_parser.cpython-36.pyc b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__pycache__/config_parser.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aa85a9d9142050b38e4da9638927d1cae63271df Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__pycache__/config_parser.cpython-36.pyc differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__pycache__/facial_recognition_handler.cpython-36.pyc 
b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__pycache__/facial_recognition_handler.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..73026f14c7f89db04392e67921605606b9bca19d Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__pycache__/facial_recognition_handler.cpython-36.pyc differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__pycache__/facial_recognition_message_pb2.cpython-36.pyc b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__pycache__/facial_recognition_message_pb2.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b02408bc56b43d21340cba41a5e57e65f6a106ce Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__pycache__/facial_recognition_message_pb2.cpython-36.pyc differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__pycache__/facial_recognition_server.cpython-36.pyc b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__pycache__/facial_recognition_server.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0e7b6bfa8793912f7faed4d187ccc0bed1011160 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__pycache__/facial_recognition_server.cpython-36.pyc differ diff --git 
a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__pycache__/web.cpython-36.pyc b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__pycache__/web.cpython-36.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7b081b31166c4f842f34e1c153972cfb67c7f647 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/__pycache__/web.cpython-36.pyc differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/config_parser.py b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/config_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..372f73bbca8dfee4cf8bfbeefc0292e84a52f359 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/config_parser.py @@ -0,0 +1,109 @@ +# ======================================================================= +# +# Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1 Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2 Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# 3 Neither the names of the copyright holders nor the names of the +# contributors may be used to endorse or promote products derived from this +# software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# ======================================================================= +# + +"""facial recognition config parser module""" + +import os +import logging +import configparser +import common.parameter_validation as validate + + +class ConfigParser(): + """ parse configuration from the config.conf""" + __instance = None + + def __init__(self): + """init""" + + def __new__(cls): + """ensure class object is a single instance""" + if cls.__instance is None: + cls.__instance = object.__new__(cls) + cls.config_parser() + return cls.__instance + + def config_verify(self): + '''Verify configuration Parameters ''' + if not validate.validate_ip(ConfigParser.web_server_ip) or \ + not validate.validate_ip(ConfigParser.presenter_server_ip) or \ + not validate.validate_port(ConfigParser.web_server_port) or \ + not validate.validate_port(ConfigParser.presenter_server_port): + return False + + if not validate.validate_integer(ConfigParser.max_face_num, 0, 100): + print("Max face num should be 1-100.") + 
logging.warning("Max face num should be 1-100.") + return False + + threshold = ConfigParser.face_match_threshold + if not validate.validate_float(threshold, 0, 1): + print("Face match threshold should be 0-1.") + logging.warning("Face match threshold should be 0-1.") + return False + + if not os.path.isdir(ConfigParser.storage_dir): + print("You should create directory \"%s\" manually." + %(ConfigParser.storage_dir)) + logging.warning("You should create directory \"%s\" manually.", + ConfigParser.storage_dir) + return False + + return True + + @classmethod + def config_parser(cls): + """parser config from config.conf""" + config_parser = configparser.ConfigParser() + cls.root_path = ConfigParser.get_rootpath() + config_file = os.path.join(cls.root_path, "config/config.conf") + config_parser.read(config_file) + + # Read config parameter + cls.web_server_ip = config_parser.get('baseconf', 'web_server_ip') + cls.presenter_server_ip = \ + config_parser.get('baseconf', 'presenter_server_ip') + cls.web_server_port = config_parser.get('baseconf', 'web_server_port') + cls.presenter_server_port = \ + config_parser.get('baseconf', 'presenter_server_port') + cls.storage_dir = config_parser.get('baseconf', 'storage_dir') + cls.max_face_num = config_parser.get('baseconf', 'max_face_num') + cls.face_match_threshold = \ + config_parser.get('baseconf', 'face_match_threshold') + + @staticmethod + def get_rootpath(): + """get presenter server's root directory.""" + path = __file__ + idx = path.rfind("src") + + return path[0:idx] diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/facial_recognition_handler.py b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/facial_recognition_handler.py new file mode 100644 index 0000000000000000000000000000000000000000..e45e96940f2ec35c809a92db757305c4c55897b1 --- 
/dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/facial_recognition_handler.py @@ -0,0 +1,132 @@ +# ======================================================================= +# +# Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1 Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2 Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3 Neither the names of the copyright holders nor the names of the +# contributors may be used to endorse or promote products derived from this +# software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# ======================================================================= +# +"""facial recognition channel handler module""" + +import time +import logging +from common.channel_handler import ChannelHandler + +HEARTBEAT_TIMEOUT = 100 + +class FacialRecognitionHandler(ChannelHandler): + '''FacialRecognitionHandler''' + def __init__(self, channel_name, media_type): + '''init func''' + self.sleep_time = 0.01 + super(FacialRecognitionHandler, self).__init__(channel_name, media_type) + + def save_frame(self, image, face_list): + """ + Description: save frame info + Input: + image: original image data + face_list: faces info, inlude name, face feature, face coordinate + Returns: NA + """ + while self.img_data: + time.sleep(self.sleep_time) + + # compute fps + self.time_list.append(self.heartbeat) + self.image_number += 1 + while self.time_list[0] + 1 < time.time(): + self.time_list.pop(0) + self.image_number -= 1 + if self.image_number == 0: + break + + self.fps = len(self.time_list) + self.img_data = image + self.face_list = face_list + self.image_event.set() + self.set_heartbeat() + + def get_frame(self): + """ + Description: get frame info + Input:NA + Returns: + { + "image": self.frame_data, + "fps": self.fps, + "face_list": self.face_list + } + """ + # wait util receive a frame data, and push it to your browser. 
+ ret = self.web_event.wait() + self.web_event.clear() + # True: _web_event return because set() + # False: _web_event return because timeout + if ret: + return { + "image": self.frame_data, + "fps": self.fps, + "face_list": self.face_list + } + + return {} + + def frames(self): + """a generator generates image""" + while True: + self.image_event.wait() + self.image_event.clear() + if self.img_data: + yield self.img_data + self.img_data = None + + # if set _close_thread_switch, return immediately + if self.close_thread_switch: + yield None + + # if no frames or heartbeat coming in the last 100 seconds, + # stop the thread and close socket + if time.time() - self.heartbeat > HEARTBEAT_TIMEOUT: + self.set_thread_switch() + self.img_data = None + yield None + + def _video_thread(self): + """background thread to process video""" + logging.info('create %s...', (self.thread_name)) + for frame in self.frames(): + if frame: + # send signal to clients + self.frame_data = frame + self.web_event.set() + + # exit thread + if self.close_thread_switch: + self.channel_manager.clean_channel_resource_by_name( + self.channel_name) + logging.info('Stop thread:%s.', (self.thread_name)) + break diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/facial_recognition_message_pb2.py b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/facial_recognition_message_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..ec8fb18c8578dd1353b8a16eb43b270b17240c30 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/facial_recognition_message_pb2.py @@ -0,0 +1,453 @@ +# ======================================================================= +# +# Copyright (C) 2018, Hisilicon Technologies Co., Ltd. 
All Rights Reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1 Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2 Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3 Neither the names of the copyright holders nor the names of the +# contributors may be used to endorse or promote products derived from this +# software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# ======================================================================= +# + +''' +Generated by the protocol buffer compiler. DO NOT EDIT! 
+source: facial_recognition_message.proto +''' +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='facial_recognition_message.proto', + package='ascend.presenter.facial_recognition', + syntax='proto3', + serialized_pb=_b('\n facial_recognition_message.proto\x12#ascend.presenter.facial_recognition\"^\n\x0e\x43ommonResponse\x12;\n\x03ret\x18\x01 \x01(\x0e\x32..ascend.presenter.facial_recognition.ErrorCode\x12\x0f\n\x07message\x18\x02 \x01(\t\"\'\n\x0bRegisterApp\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\"=\n\x03\x42ox\x12\x0c\n\x04lt_x\x18\x01 \x01(\r\x12\x0c\n\x04lt_y\x18\x02 \x01(\r\x12\x0c\n\x04rb_x\x18\x03 \x01(\r\x12\x0c\n\x04rb_y\x18\x04 \x01(\r\"T\n\x0b\x46\x61\x63\x65\x46\x65\x61ture\x12\x35\n\x03\x62ox\x18\x01 \x01(\x0b\x32(.ascend.presenter.facial_recognition.Box\x12\x0e\n\x06vector\x18\x02 \x03(\x02\"%\n\x08\x46\x61\x63\x65Info\x12\n\n\x02id\x18\x01 \x01(\t\x12\r\n\x05image\x18\x02 \x01(\x0c\"\xa2\x01\n\nFaceResult\x12\n\n\x02id\x18\x01 \x01(\t\x12\x45\n\x08response\x18\x02 \x01(\x0b\x32\x33.ascend.presenter.facial_recognition.CommonResponse\x12\x41\n\x07\x66\x65\x61ture\x18\x03 \x03(\x0b\x32\x30.ascend.presenter.facial_recognition.FaceFeature\"]\n\tFrameInfo\x12\r\n\x05image\x18\x01 \x01(\x0c\x12\x41\n\x07\x66\x65\x61ture\x18\x02 
\x03(\x0b\x32\x30.ascend.presenter.facial_recognition.FaceFeature*\x7f\n\tErrorCode\x12\x0e\n\nkErrorNone\x10\x00\x12\x1a\n\x16kErrorAppRegisterExist\x10\x01\x12\x19\n\x15kErrorAppRegisterType\x10\x02\x12\x1a\n\x16kErrorAppRegisterLimit\x10\x03\x12\x0f\n\x0bkErrorOther\x10\x05\x62\x06proto3') +) + +_ERRORCODE = _descriptor.EnumDescriptor( + name='ErrorCode', + full_name='ascend.presenter.facial_recognition.ErrorCode', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='kErrorNone', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='kErrorAppRegisterExist', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='kErrorAppRegisterType', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='kErrorAppRegisterLimit', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='kErrorOther', index=4, number=5, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=658, + serialized_end=785, +) +_sym_db.RegisterEnumDescriptor(_ERRORCODE) + +ErrorCode = enum_type_wrapper.EnumTypeWrapper(_ERRORCODE) +kErrorNone = 0 +kErrorAppRegisterExist = 1 +kErrorAppRegisterType = 2 +kErrorAppRegisterLimit = 3 +kErrorOther = 5 + + + +_COMMONRESPONSE = _descriptor.Descriptor( + name='CommonResponse', + full_name='ascend.presenter.facial_recognition.CommonResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='ret', full_name='ascend.presenter.facial_recognition.CommonResponse.ret', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='message', 
full_name='ascend.presenter.facial_recognition.CommonResponse.message', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=73, + serialized_end=167, +) + + +_REGISTERAPP = _descriptor.Descriptor( + name='RegisterApp', + full_name='ascend.presenter.facial_recognition.RegisterApp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='ascend.presenter.facial_recognition.RegisterApp.id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='type', full_name='ascend.presenter.facial_recognition.RegisterApp.type', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=169, + serialized_end=208, +) + + +_BOX = _descriptor.Descriptor( + name='Box', + full_name='ascend.presenter.facial_recognition.Box', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='lt_x', full_name='ascend.presenter.facial_recognition.Box.lt_x', index=0, + number=1, type=13, cpp_type=3, label=1, + 
has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='lt_y', full_name='ascend.presenter.facial_recognition.Box.lt_y', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='rb_x', full_name='ascend.presenter.facial_recognition.Box.rb_x', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='rb_y', full_name='ascend.presenter.facial_recognition.Box.rb_y', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=210, + serialized_end=271, +) + + +_FACEFEATURE = _descriptor.Descriptor( + name='FaceFeature', + full_name='ascend.presenter.facial_recognition.FaceFeature', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='box', full_name='ascend.presenter.facial_recognition.FaceFeature.box', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='vector', 
full_name='ascend.presenter.facial_recognition.FaceFeature.vector', index=1, + number=2, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=273, + serialized_end=357, +) + + +_FACEINFO = _descriptor.Descriptor( + name='FaceInfo', + full_name='ascend.presenter.facial_recognition.FaceInfo', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='ascend.presenter.facial_recognition.FaceInfo.id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='image', full_name='ascend.presenter.facial_recognition.FaceInfo.image', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=359, + serialized_end=396, +) + + +_FACERESULT = _descriptor.Descriptor( + name='FaceResult', + full_name='ascend.presenter.facial_recognition.FaceResult', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='ascend.presenter.facial_recognition.FaceResult.id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, 
default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='response', full_name='ascend.presenter.facial_recognition.FaceResult.response', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='feature', full_name='ascend.presenter.facial_recognition.FaceResult.feature', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=399, + serialized_end=561, +) + + +_FRAMEINFO = _descriptor.Descriptor( + name='FrameInfo', + full_name='ascend.presenter.facial_recognition.FrameInfo', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='image', full_name='ascend.presenter.facial_recognition.FrameInfo.image', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='feature', full_name='ascend.presenter.facial_recognition.FrameInfo.feature', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + 
extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=563, + serialized_end=656, +) + +_COMMONRESPONSE.fields_by_name['ret'].enum_type = _ERRORCODE +_FACEFEATURE.fields_by_name['box'].message_type = _BOX +_FACERESULT.fields_by_name['response'].message_type = _COMMONRESPONSE +_FACERESULT.fields_by_name['feature'].message_type = _FACEFEATURE +_FRAMEINFO.fields_by_name['feature'].message_type = _FACEFEATURE +DESCRIPTOR.message_types_by_name['CommonResponse'] = _COMMONRESPONSE +DESCRIPTOR.message_types_by_name['RegisterApp'] = _REGISTERAPP +DESCRIPTOR.message_types_by_name['Box'] = _BOX +DESCRIPTOR.message_types_by_name['FaceFeature'] = _FACEFEATURE +DESCRIPTOR.message_types_by_name['FaceInfo'] = _FACEINFO +DESCRIPTOR.message_types_by_name['FaceResult'] = _FACERESULT +DESCRIPTOR.message_types_by_name['FrameInfo'] = _FRAMEINFO +DESCRIPTOR.enum_types_by_name['ErrorCode'] = _ERRORCODE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +CommonResponse = _reflection.GeneratedProtocolMessageType('CommonResponse', (_message.Message,), dict( + DESCRIPTOR = _COMMONRESPONSE, + __module__ = 'facial_recognition_message_pb2' + # @@protoc_insertion_point(class_scope:ascend.presenter.facial_recognition.CommonResponse) + )) +_sym_db.RegisterMessage(CommonResponse) + +RegisterApp = _reflection.GeneratedProtocolMessageType('RegisterApp', (_message.Message,), dict( + DESCRIPTOR = _REGISTERAPP, + __module__ = 'facial_recognition_message_pb2' + # @@protoc_insertion_point(class_scope:ascend.presenter.facial_recognition.RegisterApp) + )) +_sym_db.RegisterMessage(RegisterApp) + +Box = _reflection.GeneratedProtocolMessageType('Box', (_message.Message,), dict( + DESCRIPTOR = _BOX, + __module__ = 'facial_recognition_message_pb2' + # @@protoc_insertion_point(class_scope:ascend.presenter.facial_recognition.Box) + )) +_sym_db.RegisterMessage(Box) + +FaceFeature = 
_reflection.GeneratedProtocolMessageType('FaceFeature', (_message.Message,), dict( + DESCRIPTOR = _FACEFEATURE, + __module__ = 'facial_recognition_message_pb2' + # @@protoc_insertion_point(class_scope:ascend.presenter.facial_recognition.FaceFeature) + )) +_sym_db.RegisterMessage(FaceFeature) + +FaceInfo = _reflection.GeneratedProtocolMessageType('FaceInfo', (_message.Message,), dict( + DESCRIPTOR = _FACEINFO, + __module__ = 'facial_recognition_message_pb2' + # @@protoc_insertion_point(class_scope:ascend.presenter.facial_recognition.FaceInfo) + )) +_sym_db.RegisterMessage(FaceInfo) + +FaceResult = _reflection.GeneratedProtocolMessageType('FaceResult', (_message.Message,), dict( + DESCRIPTOR = _FACERESULT, + __module__ = 'facial_recognition_message_pb2' + # @@protoc_insertion_point(class_scope:ascend.presenter.facial_recognition.FaceResult) + )) +_sym_db.RegisterMessage(FaceResult) + +FrameInfo = _reflection.GeneratedProtocolMessageType('FrameInfo', (_message.Message,), dict( + DESCRIPTOR = _FRAMEINFO, + __module__ = 'facial_recognition_message_pb2' + # @@protoc_insertion_point(class_scope:ascend.presenter.facial_recognition.FrameInfo) + )) +_sym_db.RegisterMessage(FrameInfo) + + +# @@protoc_insertion_point(module_scope) diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/facial_recognition_server.py b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/facial_recognition_server.py new file mode 100644 index 0000000000000000000000000000000000000000..80600934c8aaecc44ec4ed8d890d37f041da7184 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/facial_recognition_server.py @@ -0,0 +1,730 @@ +# ======================================================================= +# +# Copyright (C) 2018, 
Hisilicon Technologies Co., Ltd. All Rights Reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1 Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2 Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3 Neither the names of the copyright holders nor the names of the +# contributors may be used to endorse or promote products derived from this +# software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# ======================================================================= +# +"""presenter facial recognition server module""" + +import os +import json +import threading +import random +import logging +from logging.config import fileConfig +import numpy as np +from json.decoder import JSONDecodeError +from google.protobuf.message import DecodeError +import common.presenter_message_pb2 as presenter_message_pb2 +from common.channel_manager import ChannelManager +from common.presenter_socket_server import PresenterSocketServer +from common.app_manager import AppManager +import facial_recognition.src.facial_recognition_message_pb2 as pb2 +from facial_recognition.src.config_parser import ConfigParser +from facial_recognition.src.facial_recognition_handler import FacialRecognitionHandler + + +# Face Registration timeout is 10 seconds +FACE_REGISTER_TIME_OUT = 10 + +# Presenter Server Type +SERVER_TYPE = "facial_recognition" + +# max app name length +APP_ID_MAX_LENGTH = 20 + +# max support 2 app connect +MAX_APP_NUM = 2 + +# length of face feature vector +FEATURE_VECTOR_LENGTH = 1024 + +# Face Registration Status code +FACE_REGISTER_STATUS_WAITING = 1 +FACE_REGISTER_STATUS_SUCCEED = 2 +FACE_REGISTER_STATUS_FAILED = 3 + +class FacialRecognitionServer(PresenterSocketServer): + '''A server for face recognition''' + def __init__(self, config): + """ + Description: class init func + Input: + config: config information + Returns: NA + """ + server_address = (config.presenter_server_ip, + int(config.presenter_server_port)) + super(FacialRecognitionServer, self).__init__(server_address) + self.storage_dir = config.storage_dir + self.max_face_num = int(config.max_face_num) + self.face_match_threshold = float(config.face_match_threshold) + self.register_dict = {} + self.app_manager = AppManager() + self.channel_manager = ChannelManager() + self.face_register_file = os.path.join(self.storage_dir, + "registered_faces.json") + self._init_face_database() + + def 
_init_face_database(self): + """ + Description: Init face recognition database, + read information from face_register_file + Input: NA + Returns: NA + """ + if not os.path.isfile(self.face_register_file): + with open(self.face_register_file, "w", encoding="utf-8") as f: + f.write("{}") + + with open(self.face_register_file, "r") as f: + self.face_lock = threading.Lock() + self.registered_faces = json.load(f) + self._filter_registration_data() + + def _filter_registration_data(self): + face_dict = self.registered_faces.copy() + for i in face_dict: + image_path = os.path.join(self.storage_dir, i + ".jpg") + if not os.path.isfile(image_path): + del self.registered_faces[i] + + def get_all_face(self): + """ + Description: get registered face list. + Input: NA + Returns: NA + """ + with self.face_lock: + return [i for i in self.registered_faces] + + def save_face_image(self, name, image): + """ + Description: save face image. + Input: + name face name + image: face image + Returns: True or False + """ + image_file = os.path.join(self.storage_dir, name + ".jpg") + try: + #image = image.decode("utf-8") + with open(image_file, "wb") as f: + f.write(image) + return True + except (OSError, TypeError) as exp: + logging.error(exp) + return False + + def get_app_socket(self, app_id): + """ + Description: get a socket which is bound to the app. + Input: + app_id: id of the app + Returns: socket + """ + return self.app_manager.get_socket_by_app_id(app_id) + + def list_registered_apps(self): + """ + Description: get registered apps list. 
+ Input: NA + Returns: app list + """ + return self.app_manager.list_app() + + def delete_faces(self, name_list): + """ + Description: delete registered faces in name_list + Input: + name_list: a name list + Returns: True or False + """ + with self.face_lock: + for i in name_list: + if self.registered_faces.get(i): + backup = self.registered_faces[i] + del self.registered_faces[i] + try: + with open(self.face_register_file, "w") as f: + json.dump(self.registered_faces, f) + image_file = os.path.join( + self.storage_dir, i + ".jpg") + os.remove(image_file) + except (OSError, JSONDecodeError) as exp: + logging.error(exp) + self.registered_faces[i] = backup + return False + return True + + def _clean_connect(self, sock_fileno, epoll, conns, msgs): + """ + Description: close socket, and clean local variables + Input: + sock_fileno: a socket fileno, return value of socket.fileno() + epoll: a set of select.epoll. + conns: all socket connections registered in epoll + msgs: msg read from a socket + """ + logging.info("clean fd:%s, conns:%s", sock_fileno, conns) + self.app_manager.unregister_app_by_fd(sock_fileno) + epoll.unregister(sock_fileno) + conns[sock_fileno].close() + del conns[sock_fileno] + del msgs[sock_fileno] + + + def _process_msg(self, conn, msg_name, msg_data): + """ + Total entrance to process protobuf msg + Input: + conn: a socket connection + msg_name: name of a msg. 
+ msg_data: msg body, serialized by protobuf + + Returns: + False:somme error occured + True:succeed + + """ + # process open channel request + if msg_name == pb2._REGISTERAPP.full_name: + ret = self._process_register_app(conn, msg_data) + # process image request, receive an image data from presenter agent + elif msg_name == pb2._FACERESULT.full_name: + ret = self._process_face_result(msg_data) + elif msg_name == pb2._FRAMEINFO.full_name: + ret = self._process_frame_info(conn, msg_data) + elif msg_name == presenter_message_pb2._OPENCHANNELREQUEST.full_name: + ret = self._process_open_channel(conn, msg_data) + # process heartbeat request, it used to keepalive a channel path + elif msg_name == presenter_message_pb2._HEARTBEATMESSAGE.full_name: + ret = self._process_heartbeat(conn) + else: + logging.error("Not recognized msg type %s", msg_name) + ret = False + + return ret + + def _process_heartbeat(self, conn): + ''' + set heartbeat + Input: + conn: a socket connection + Returns: + True: set heartbeat ok. 
+ + ''' + sock_fileno = conn.fileno() + if self.app_manager.get_app_id_by_socket(sock_fileno): + self.app_manager.set_heartbeat(sock_fileno) + + handler = self.channel_manager.get_channel_handler_by_fd(sock_fileno) + if handler is not None: + handler.set_heartbeat() + return True + + def _parse_protobuf(self, protobuf, msg_data): + """ + Description: parse protobuf + Input: + protobuf: a struct defined by protobuf + msg_data: msg body, serialized by protobuf + Returns: True or False + """ + try: + protobuf.ParseFromString(msg_data) + return True + except DecodeError as exp: + logging.error(exp) + return False + + def _process_register_app(self, conn, msg_data): + """ + Description: process register_app message + Input: + conn: a socket connection + msg_data: msg body, serialized by protobuf + Returns: True or False + """ + request = pb2.RegisterApp() + response = pb2.CommonResponse() + msg_name = pb2._COMMONRESPONSE.full_name + if not self._parse_protobuf(request, msg_data): + response.ret = pb2.kErrorOther + response.message = "ParseFromString exception" + self.send_message(conn, response, msg_name) + return False + + app_id = request.id + app_type = request.type + + # check app id if exist + if self.app_manager.is_app_exist(app_id): + logging.error("App %s is already exist.", app_id) + response.ret = pb2.kErrorAppRegisterExist + response.message = "App {} is already exist.".format(app_id) + self.send_message(conn, response, msg_name) + elif self.app_manager.get_app_num() >= MAX_APP_NUM: + logging.error("App number reach the upper limit") + response.ret = pb2.kErrorAppRegisterLimit + response.message = "App number reach the upper limit" + self.send_message(conn, response, msg_name) + elif app_type != SERVER_TYPE: + logging.error("App type %s error", app_type) + response.ret = pb2.kErrorAppRegisterType + response.message = "App type {} error".format(app_type) + self.send_message(conn, response, msg_name) + elif len(app_id) > APP_ID_MAX_LENGTH: + logging.error("App 
id %s is too long", app_id) + response.ret = pb2.kErrorOther + response.message = "App id: {} is too long".format(app_id) + self.send_message(conn, response, msg_name) + else: + self.app_manager.register_app(app_id, conn) + response.ret = pb2.kErrorNone + response.message = "Register app {} succeed".format(app_id) + self.send_message(conn, response, msg_name) + return True + + return False + + def _process_face_result(self, msg_data): + """ + Description: process face_result message + Input: + msg_data: msg body, serialized by protobuf + Returns: True or False + """ + face_result = pb2.FaceResult() + if not self._parse_protobuf(face_result, msg_data): + return False + + face_id = face_result.id + if not self.register_dict.get(face_id): + logging.warning("face id %s is already deleted", face_id) + return True + + ret = face_result.response.ret + if ret != pb2.kErrorNone: + err_msg = face_result.response.message + logging.error("get face feature error message: %s", err_msg) + status = FACE_REGISTER_STATUS_FAILED + message = "Get face feature failed" + self._update_register_dict(face_id, status, message) + return True + + face_num = len(face_result.feature) + if face_num == 0: + status = FACE_REGISTER_STATUS_FAILED + message = "No face recognized" + self._update_register_dict(face_id, status, message) + elif face_num > 1: + status = FACE_REGISTER_STATUS_FAILED + message = "{} faces recognized".format(face_num) + self._update_register_dict(face_id, status, message) + else: + box = face_result.feature[0].box + face_coordinate = [box.lt_x, box.lt_y, box.rb_x, box.rb_x] + feature_vector = [i for i in face_result.feature[0].vector] + if len(feature_vector) != FEATURE_VECTOR_LENGTH: + logging.error("feature_vector length not equal 1024") + status = FACE_REGISTER_STATUS_FAILED + message = "Face feature vector length invalid" + self._update_register_dict(face_id, status, message) + return True + return self._save_face_feature(face_id, face_coordinate, + feature_vector) + + 
return True + + def _update_register_dict(self, face_id, status, message): + """ + Description: update register_dict + Input: + face_id: id of face + status: status of face register + message: message of status of face register + Returns: True or False + """ + if self.register_dict.get(face_id): + self.register_dict[face_id]["status"] = status + self.register_dict[face_id]["message"] = message + self.register_dict[face_id]["event"].set() + + def _save_face_feature(self, face_id, face_coordinate, feature_vector): + """ + Description: save face_feature + Input: + face_id: id of face + face_coordinate: face coordinates + feature_vector: face feature vector + Returns: True or False + """ + with self.face_lock: + self.registered_faces[face_id] = { + "coordinate": face_coordinate, + "feature": feature_vector + } + try: + with open(self.face_register_file, "w") as f: + json.dump(self.registered_faces, f) + status = FACE_REGISTER_STATUS_SUCCEED + message = "Successful registration" + self._update_register_dict(face_id, status, message) + return True + except (OSError, JSONDecodeError) as exp: + logging.error(exp) + del self.registered_faces[face_id] + status = FACE_REGISTER_STATUS_FAILED + message = "save face feature to json file failed" + self._update_register_dict(face_id, status, message) + return False + + def _process_open_channel(self, conn, msg_data): + """ + Description: process open channel message + Input: + conn: a socket connection + msg_data: msg body, serialized by protobuf + Returns: True or False + """ + request = presenter_message_pb2.OpenChannelRequest() + response = presenter_message_pb2.OpenChannelResponse() + if not self._parse_protobuf(request, msg_data): + channel_name = "unknown channel" + err_code = presenter_message_pb2.kOpenChannelErrorOther + return self._response_open_channel(conn, channel_name, + response, err_code) + channel_name = request.channel_name + + # check channel name if exist + if not 
self.channel_manager.is_channel_exist(channel_name): + logging.error("channel name %s is not exist.", channel_name) + err_code = presenter_message_pb2.kOpenChannelErrorNoSuchChannel + return self._response_open_channel(conn, channel_name, + response, err_code) + #ret = self.channel_manager.register_one_channel(channel_name) + #if ret != ChannelManager.err_code_ok: + # logging.error("Create the channel %s failed!, and ret is %d", channel_name, ret) + # err_code = pb2.kOpenChannelErrorOther + # self._response_open_channel(conn, channel_name, response, err_code) + + # check channel path if busy + if self.channel_manager.is_channel_busy(channel_name): + logging.error("channel path %s is busy.", channel_name) + err = presenter_message_pb2.kOpenChannelErrorChannelAlreadyOpened + return self._response_open_channel(conn, channel_name, + response, err) + + content_type = presenter_message_pb2.kChannelContentTypeVideo + if request.content_type == content_type: + media_type = "video" + else: + logging.error("media type %s is not recognized.", + request.content_type) + err_code = presenter_message_pb2.kOpenChannelErrorOther + return self._response_open_channel(conn, channel_name, + response, err_code) + + handler = FacialRecognitionHandler(channel_name, media_type) + sock = conn.fileno() + self.channel_manager.create_channel_resource(channel_name, sock, + media_type, handler) + err_code = presenter_message_pb2.kOpenChannelErrorNone + return self._response_open_channel(conn, channel_name, + response, err_code) + + def _process_frame_info(self, conn, msg_data): + """ + Description: process frame info message + Input: + conn: a socket connection + msg_data: msg body, serialized by protobuf + Returns: True or False + """ + request = pb2.FrameInfo() + response = pb2.CommonResponse() + msg_name = pb2._COMMONRESPONSE.full_name + if not self._parse_protobuf(request, msg_data): + return False + + sock_fileno = conn.fileno() + handler = 
self.channel_manager.get_channel_handler_by_fd(sock_fileno) + if handler is None: + logging.error("get channel handler failed") + response.ret = pb2.kErrorOther + response.message = "channel error." + self.send_message(conn, response, msg_name) + return False + + face_list = self._recognize_face(request.feature) + handler.save_frame(request.image, face_list) + response.ret = pb2.kErrorNone + response.message = "process frame info suceed." + self.send_message(conn, response, msg_name) + return True + + def _recognize_face(self, face_feature): + """ + Description: recognize which face it is. + Input: + face_feature: face feature + Returns: face list + """ + face_list = [] + for i in face_feature: + face_info = {} + box = i.box + coordinate = [box.lt_x, box.lt_y, box.rb_x, box.rb_y] + feature_vector = i.vector + if len(feature_vector) != FEATURE_VECTOR_LENGTH: + logging.error("feature_vector length not equal 1024") + continue + + (name, score) = self._compute_face_feature(feature_vector) + face_info["coordinate"] = coordinate + face_info["name"] = name + face_info["confidence"] = score + face_list.append(face_info) + + return face_list + + def _compute_face_feature(self, feture_vector): + """ + Description: compute score of the feture_vector + Input: + feture_vector: face feature vector + Returns: face name and score + """ + highest_score_face = "Unknown" + highest_score = 0 + with self.face_lock: + for i in self.registered_faces: + feature = self.registered_faces[i]["feature"] + score = self._compute_similar_degree(feature, feture_vector) + if score < self.face_match_threshold: + continue + + if score > highest_score: + highest_score = score + highest_score_face = i + return (highest_score_face, highest_score) + + def _compute_similar_degree(self, feture_vector1, feture_vector2): + """ + Description: compute cosine similarity of two vectors + Input: + feture_vector1: face feature vector + feture_vector2: face feature vector + Returns: score + """ + vector1 = 
np.array(feture_vector1) + vector2 = np.array(feture_vector2) + square_diff = ((np.linalg.norm(vector1)) * (np.linalg.norm(vector2))) + score = np.dot(vector1, vector2) / square_diff + return score + + + def stop_thread(self): + """ + Description: clean thread when process exit. + Input: NA + Returns: NA + """ + channel_manager = ChannelManager([]) + channel_manager.close_all_thread() + self.set_exit_switch() + self.app_manager.set_thread_switch() + + +class FacialRecognitionManager(): + '''Manager of Face Recognition, a class providing APIs''' + __instance = None + server = None + + def __init__(self, server=None): + '''init func''' + + def __new__(cls, server=None): + """ensure only a single instance created. """ + if cls.__instance is None: + cls.__instance = object.__new__(cls) + cls.server = server + return cls.__instance + + def _choose_random_app(self): + """ + Description: choose a random app online. + Input: NA + Returns: a app name + """ + app_list = self.server.list_registered_apps() + if app_list: + index = random.randint(0, len(app_list) - 1) + return app_list[index] + return None + + def get_app_list(self): + """ + Description: API for getting online app list + Input: NA + Returns: app list + """ + return self.server.list_registered_apps() + + def register_face(self, name, image): + """ + Description: API for registering face + Input: + name: a face name + image: a face picture + Returns: (ret, msg) + """ + + # Input para check + if not isinstance(name, str): + return (False, "Name is not string") + + if not isinstance(image, bytes): + return (False, "Image is not bytes") + + if self._get_face_number() >= self.server.max_face_num: + return (False, "Face number limit") + + app_id = self._choose_random_app() + if app_id is None: + return (False, "No app is online") + + conn = self.server.get_app_socket(app_id) + if conn is None: + return (False, "Internal Error, app lost socket") + + # Prepare sending face register message to agent + request = 
pb2.FaceInfo() + request.id = name + request.image = image + + register_dict = self.server.register_dict + register_dict[name] = { + "status": FACE_REGISTER_STATUS_WAITING, + "message": "", + "event": threading.Event() + } + + msg_name = pb2._FACEINFO.full_name + self.server.send_message(conn, request, msg_name) + register_dict[name]["event"].wait(FACE_REGISTER_TIME_OUT) + if register_dict[name]["status"] == FACE_REGISTER_STATUS_WAITING: + logging.warning("Register face %s timeout", name) + del register_dict[name] + return (False, "10 sec Timeout") + + if register_dict[name]["status"] == FACE_REGISTER_STATUS_FAILED: + err_msg = register_dict[name]["message"] + logging.error("Register face %s failed, reason:%s", + name, register_dict[name]["message"]) + del register_dict[name] + return (False, err_msg) + + ret = self.server.save_face_image(name, image) + del register_dict[name] + if ret: + logging.info("Register face %s succeed", name) + return (True, "Successful Registration") + + logging.error("Save face %s to database failed", name) + return (False, "Save database error") + + def unregister_face(self, name_list): + """ + Description: API for unregistering faces + Input: + name_list: a name list which will be deleted. + Returns: True or False + """ + if isinstance(name_list, list): + return self.server.delete_faces(name_list) + logging.error("unregister face fail") + return False + + def get_all_face_name(self): + """ + Description: API for geting all registered face names + Input: NA + Returns: a name list + """ + return self.server.get_all_face() + + def _get_face_number(self): + """ + Description: geting total face number + Input: NA + Returns: total face number + """ + return len(self.get_all_face_name()) + + def get_faces(self, name_list): + """ + Description: API for geting specified face info. + Input: a name list. + Returns: a list include face name and image. 
+ """ + if not isinstance(name_list, list): + return [] + + face_list = [] + for i in name_list: + face_info = {} + face_info["name"] = i + try: + image_file = os.path.join(self.server.storage_dir, i + ".jpg") + face_info["image"] = open(image_file, 'rb').read() + except OSError as exp: + logging.error(exp) + continue + face_list.append(face_info) + + return face_list + +def run(): + '''Face Recognition server startup function''' + # read config file + config = ConfigParser() + + # config log + log_file_path = os.path.join(ConfigParser.root_path, "config/logging.conf") + fileConfig(log_file_path) + logging.getLogger('facial_recognition') + + if not config.config_verify(): + return None + + server = FacialRecognitionServer(config) + FacialRecognitionManager(server) + return server diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/web.py b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/web.py new file mode 100644 index 0000000000000000000000000000000000000000..75838bdf99143df8537e80a6be4c8cf3b0a8078c --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/src/web.py @@ -0,0 +1,525 @@ +# +# ======================================================================= +# +# Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1 Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. 
+# +# 2 Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3 Neither the names of the copyright holders nor the names of the +# contributors may be used to endorse or promote products derived from this +# software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# ======================================================================= +# +""" +web application for presenter server. 
+""" +import os +import re +import random +import base64 +import threading +import time +import logging +import tornado.ioloop +import tornado.web +import tornado.gen +import tornado.websocket +import common.channel_manager as channel_manager +import facial_recognition.src.config_parser as config_parser +import facial_recognition.src.facial_recognition_server as facial_recognition_server + +# app +G_WEBAPP = None + +# jpeg base64 header +JPEG_BASE64_HEADER = "data:image/jpeg;base64," + +# get request +REQUEST = "req" + +# get appname +APP_NAME = "app_name" + +# get username +USER_NAME = "user_name" + +# get image +IMAGE = "image_data" + +# return code +RET_CODE_SUCCESS = "0" +RET_CODE_FAIL = "1" +RET_CODE_LOADING = "2" + + +class WebApp: + """ + web application + """ + __instance = None + def __init__(self): + """ + init method + """ + self.channel_mgr = channel_manager.ChannelManager() + + self.facial_recognize_manage = facial_recognition_server.FacialRecognitionManager() + + self.request_list = set() + + self.lock = threading.Lock() + + self.videostate = {"ret":0,"msg":{"name":"","req":0}} + + + def __new__(cls, *args, **kwargs): + # if instance is None than create one + if cls.__instance is None: + cls.__instance = object.__new__(cls, *args, **kwargs) + return cls.__instance + + + def get_videostate(self): + '''get video state''' + tmpvideostate = self.videostate + tmplist = self.facial_recognize_manage.get_app_list() + if tmpvideostate["ret"] == 1 and tmpvideostate["msg"]["name"] in tmplist: + return self.videostate + else: + tmpvideostate["ret"] = 0 + self.videostate = tmpvideostate + return tmpvideostate + + + def list_registered_apps(self): + '''get registered apps''' + app_list = self.facial_recognize_manage.get_app_list() + ret = [] + idx = 1 + for item in app_list: + ret.append({"id":idx, "appname":item}) + idx = idx+1 + + return ret + + + def is_channel_exists(self, name): + '''check is app exists''' + return self.channel_mgr.is_channel_exist(name) + + + 
def register_face(self, user_name, image_data): + ''' register user face''' + ret = {"ret":RET_CODE_FAIL, "msg":""} + + if user_name is None: + logging.info("User name is None, register face failed") + ret["msg"] = "User name can not be empty" + return ret + + #strip user name + user_name = user_name.strip() + + if user_name == "": + logging.info("User name is empty, register face failed") + ret["msg"] = "User name can not be empty" + return ret + + if len(user_name) > 50: + logging.info("Length of User name %s > 50 , register face failed", user_name) + ret["msg"] = "Length of User name should less than 50" + return ret + + if image_data is None: + logging.info("Image data is None, register face failed") + ret["msg"] = "Image data can not be empty" + return ret + + # define pattern support a-z A-Z and / + pattern = re.compile(r"[a-z]|[A-Z]|[0-9]|(\s)") + tmp = pattern.findall(user_name) + + # check reuslt changed or not + if len(tmp) != len(user_name): + logging.info("%s contain invalidate character, add channel failed", user_name) + ret["msg"] = "Channel name only support 0-9, a-z, A-Z /" + return ret + + # check image base64 code + if len(image_data) <= len(JPEG_BASE64_HEADER): + logging.info("Invalid jpeg base64 header identifier") + ret["msg"] = "Just support image in jpg/jpeg format" + return ret + + # jpeg base64 header check + if image_data[0:len(JPEG_BASE64_HEADER)] != JPEG_BASE64_HEADER: + logging.info("Invalid jpeg base64 header identifier") + ret["msg"] = "Just support image in jpg/jpeg format" + return ret + + # remove base64 header "data:image/jpeg;base64,"" + img_data = image_data[len(JPEG_BASE64_HEADER):len(image_data)] + + try: + #convert to binary data + decode_img = base64.b64decode(img_data) + except (ValueError, TypeError) as exp: + logging.error(exp) + return {"ret":RET_CODE_FAIL, "msg":"Image decode error"} + + flag = self.facial_recognize_manage.register_face(user_name, decode_img) + + if flag[0] is True: + logging.info("Register face 
success") + ret = {"ret":RET_CODE_SUCCESS, "msg":flag[1]} + else: + logging.info("Register face failed") + ret = {"ret":RET_CODE_FAIL, "msg":flag[1]} + + return ret + + def unregister_face(self, name_list): + '''delete regeistered face''' + ret = {"ret":RET_CODE_FAIL, "msg":""} + + if not name_list: + logging.info("Name list is empty,delete name failed") + ret["msg"] = "Name list should not be empty" + return ret + + flag = self.facial_recognize_manage.unregister_face(name_list) + + if flag is False: + ret["ret"] = RET_CODE_FAIL + ret["msg"] = "Delete face failed" + logging.info("Delete face failed") + elif flag is True: + ret["ret"] = RET_CODE_SUCCESS + ret["msg"] = "Delete face success" + logging.info("Delete face success") + + return ret + + + def list_allface(self): + '''list all users face''' + name_list = self.facial_recognize_manage.get_all_face_name() + + if not name_list: + return [] + + name_list = sorted(name_list) + show_face = self.facial_recognize_manage.get_faces(name_list) + for item in show_face: + try: + #convert binary data to base64 + item["image"] = JPEG_BASE64_HEADER + base64.b64encode(item["image"]).decode("utf-8") + except (ValueError, TypeError) as exp: + logging.error(exp) + return [] + + return show_face + + def list_allfacename(self): + '''list all register user name''' + return self.facial_recognize_manage.get_all_face_name() + + + # Input:appname + # Output: + # { + # status: ok/error + # image: image data + # fps: frame per second + # face_list: [{ + # "name":value, + # "confidence":value + # "coordinate":[lt_x, lt_y, rb_x, rb_y] + # }] } + # } + def get_media_data(self, app_name): + '''get media data''' + ret = {"ret":RET_CODE_FAIL, "image":"", "fps":"0", "face_list":""} + + if self.is_channel_exists(app_name) is False: + return ret + + handler = self.channel_mgr.get_channel_handler_by_name(app_name) + + ret["ret"] = RET_CODE_LOADING + + if handler is not None: + frame_info = handler.get_frame() + else: + return ret + + if not 
frame_info: + return ret + + try: + ret["image"] = base64.b64encode(frame_info["image"]).decode("utf-8") + except (TypeError, ValueError) as exp: + logging.error(exp) + return ret + + ret["ret"] = RET_CODE_SUCCESS + ret["fps"] = frame_info["fps"] + ret["face_list"] = frame_info["face_list"] + + return ret + + def add_requst(self, request): + """ + add request + + @param requst: request item to be stored + + @note: request can not be same with other request. + request is identified by (channel name ,random number) + so this method do not return value. + """ + with self.lock: + self.request_list.add(request) + self.videostate = {"ret":1,"msg":{"name":request[1],"req":request[0]}} + + def has_request(self, request): + """ + whether request exist or not + + @param request: request to be checked. + @return: return True if exists, otherwise return False. + """ + with self.lock: + + for item in self.request_list: + + # check request equal + if item[0] == request[0] and item[1] == request[1]: + return True + + return False + +# pylint: disable=abstract-method +class BaseHandler(tornado.web.RequestHandler): + """ + base handler. 
+ """ + +# pylint: disable=abstract-method +class ApplistHandler(BaseHandler): + """ + handler index request + """ + + @tornado.web.asynchronous + def get(self): + """ + handle home or index request only for get + """ + # self.render("applist.html", listret=G_WEBAPP.list_registered_apps()) + self.render("home.html", listret=(G_WEBAPP.list_registered_apps(), G_WEBAPP.list_allface(), G_WEBAPP.get_videostate())) + +# pylint: disable=abstract-method +class RegisterHandler(BaseHandler): + """ + handler register face + """ + @tornado.web.asynchronous + def post(self): + """ + handle reqeust for register face + """ + user_name = self.get_argument(USER_NAME, '') + name_list = G_WEBAPP.list_allfacename() + + # check user name is duplicate + for item in name_list: + if user_name == item: + self.finish({"ret":RET_CODE_FAIL, "msg":"user name has existed"}) + return None + + image_data = self.get_argument(IMAGE, '') + + self.finish(G_WEBAPP.register_face(user_name, image_data)) + return None + + +# pylint: disable=abstract-method +class DelFaceHandler(BaseHandler): + """ + handler delete request + """ + @tornado.web.asynchronous + def post(self): + """ + handel requst for delete channel + """ + name_list = self.get_arguments(USER_NAME) + + self.finish(G_WEBAPP.unregister_face(name_list)) + + +# pylint: disable=abstract-method +class ViewHandler(BaseHandler): + """ + handler view request + """ + @tornado.web.asynchronous + def get(self): + """ + handler request for view channel + """ + channel_name = self.get_argument(APP_NAME, '') + if G_WEBAPP.is_channel_exists(channel_name): + req_id = str(random.random()) + G_WEBAPP.add_requst((req_id, channel_name)) + self.finish({"ret":RET_CODE_SUCCESS,"msg":req_id}) + else: + self.finish({"ret":RET_CODE_FAIL,"msg":"Channel not exist"}) + +# pylint: disable=abstract-method + +class WebSocket(tornado.websocket.WebSocketHandler): + """ + web socket for web page socket quest + """ + def open(self): + """ + called when client request by ws or 
wss + """ + + self.req_id = self.get_argument(REQUEST, '', True) + self.channel_name = self.get_argument(APP_NAME, '', True) + + # check request valid or not. + if not G_WEBAPP.has_request((self.req_id, self.channel_name)): + self.close() + + + @staticmethod + def send_message(obj, message, binary=False): + """ + send message to client. + """ + + # check socket exist or not + if not obj.ws_connection or not obj.ws_connection.stream.socket: + return False + + ret = False + try: + obj.write_message(message, binary) + ret = True + except tornado.websocket.WebSocketClosedError: + ret = False + + return ret + + + def on_close(self): + """ + called when closed web socket + """ + + @tornado.web.asynchronous + @tornado.gen.coroutine + def on_message(self, message): + """ + On recv message from client. + """ + if message == "next": + self.run_task() + + + def run_task(self): + """ + send image to client + """ + + # check channel valid + if not G_WEBAPP.is_channel_exists(self.channel_name) or \ + not G_WEBAPP.has_request((self.req_id, self.channel_name)): + self.close() + return + + result = G_WEBAPP.get_media_data(self.channel_name) + + # sleep 100ms if status not ok for frequently query + if result['ret'] != RET_CODE_SUCCESS: + time.sleep(0.1) + + # if channel not exist close websocket. + if result['ret'] == RET_CODE_FAIL: + self.close() + # send message to client + else: + # close websoket when send failed or for image channel. + ret = WebSocket.send_message(self, result) + + +def get_webapp(): + """ + start web applicatioin + """ + # get template file and static file path. + templatepath = os.path.join(config_parser.ConfigParser.get_rootpath(), "ui/templates") + staticfilepath = os.path.join(config_parser.ConfigParser.get_rootpath(), "ui/static") + + # create application object. 
+ app = tornado.web.Application(handlers=[(r"/", ApplistHandler), + (r"/register", RegisterHandler), + (r"/delete", DelFaceHandler), + (r"/view", ViewHandler), + (r"/static/(.*)", + tornado.web.StaticFileHandler, + {"path": staticfilepath}), + (r"/websocket", WebSocket)], + template_path=templatepath) + + # create server + http_server = tornado.httpserver.HTTPServer(app) + + return http_server + + +def start_webapp(): + """ + start webapp + """ + global G_WEBAPP + G_WEBAPP = WebApp() + + http_server = get_webapp() + config = config_parser.ConfigParser() + http_server.listen(config.web_server_port, address=config.web_server_ip) + + print("Please visit http://" + config.web_server_ip + ":" + + str(config.web_server_port) + " for presenter server") + tornado.ioloop.IOLoop.instance().start() + + +def stop_webapp(): + """ + stop web app + """ + tornado.ioloop.IOLoop.instance().stop() diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/123.png b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/123.png new file mode 100644 index 0000000000000000000000000000000000000000..a0c4be5458c98a9c0dee80044d89887222b2c7a3 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/123.png differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/384_304.jpg b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/384_304.jpg new file mode 100644 index 0000000000000000000000000000000000000000..ff08e4218ed4efe1eee16ce46440e2b17984a22d Binary files /dev/null and 
b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/384_304.jpg differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/384_304_02.jpg b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/384_304_02.jpg new file mode 100644 index 0000000000000000000000000000000000000000..d10ebec6d89f7573b4e827461dcdb4b3750c12d2 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/384_304_02.jpg differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/bootstrap/css/bootstrap.min.css b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/bootstrap/css/bootstrap.min.css new file mode 100644 index 0000000000000000000000000000000000000000..c547283bbda8533f4a19931b112c282fda789891 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/bootstrap/css/bootstrap.min.css @@ -0,0 +1,7 @@ +/*! + * Bootstrap v3.0.3 (http://getbootstrap.com) + * Copyright 2013 Twitter, Inc. + * Licensed under http://www.apache.org/licenses/LICENSE-2.0 + */ + +/*! 
normalize.css v2.1.3 | MIT License | git.io/normalize */article,aside,details,figcaption,figure,footer,header,hgroup,main,nav,section,summary{display:block}audio,canvas,video{display:inline-block}audio:not([controls]){display:none;height:0}[hidden],template{display:none}html{font-family:sans-serif;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}a{background:transparent}a:focus{outline:thin dotted}a:active,a:hover{outline:0}h1{margin:.67em 0;font-size:2em}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:bold}dfn{font-style:italic}hr{height:0;-moz-box-sizing:content-box;box-sizing:content-box}mark{color:#000;background:#ff0}code,kbd,pre,samp{font-family:monospace,serif;font-size:1em}pre{white-space:pre-wrap}q{quotes:"\201C" "\201D" "\2018" "\2019"}small{font-size:80%}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sup{top:-0.5em}sub{bottom:-0.25em}img{border:0}svg:not(:root){overflow:hidden}figure{margin:0}fieldset{padding:.35em .625em .75em;margin:0 2px;border:1px solid #c0c0c0}legend{padding:0;border:0}button,input,select,textarea{margin:0;font-family:inherit;font-size:100%}button,input{line-height:normal}button,select{text-transform:none}button,html input[type="button"],input[type="reset"],input[type="submit"]{cursor:pointer;-webkit-appearance:button}button[disabled],html input[disabled]{cursor:default}input[type="checkbox"],input[type="radio"]{padding:0;box-sizing:border-box}input[type="search"]{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;-webkit-appearance:textfield}input[type="search"]::-webkit-search-cancel-button,input[type="search"]::-webkit-search-decoration{-webkit-appearance:none}button::-moz-focus-inner,input::-moz-focus-inner{padding:0;border:0}textarea{overflow:auto;vertical-align:top}table{border-collapse:collapse;border-spacing:0}@media 
print{*{color:#000!important;text-shadow:none!important;background:transparent!important;box-shadow:none!important}a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}a[href^="javascript:"]:after,a[href^="#"]:after{content:""}pre,blockquote{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100%!important}@page{margin:2cm .5cm}p,h2,h3{orphans:3;widows:3}h2,h3{page-break-after:avoid}select{background:#fff!important}.navbar{display:none}.table td,.table th{background-color:#fff!important}.btn>.caret,.dropup>.btn>.caret{border-top-color:#000!important}.label{border:1px solid #000}.table{border-collapse:collapse!important}.table-bordered th,.table-bordered td{border:1px solid #ddd!important}}*,*:before,*:after{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:62.5%;-webkit-tap-highlight-color:rgba(0,0,0,0)}body{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:14px;line-height:1.428571429;color:#333;background-color:#fff}input,button,select,textarea{font-family:inherit;font-size:inherit;line-height:inherit}a{color:#428bca;text-decoration:none}a:hover,a:focus{color:#2a6496;text-decoration:underline}a:focus{outline:thin dotted;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}img{vertical-align:middle}.img-responsive{display:block;height:auto;max-width:100%}.img-rounded{border-radius:6px}.img-thumbnail{display:inline-block;height:auto;max-width:100%;padding:4px;line-height:1.428571429;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.img-circle{border-radius:50%}hr{margin-top:20px;margin-bottom:20px;border:0;border-top:1px solid 
#eee}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}h1,h2,h3,h4,h5,h6,.h1,.h2,.h3,.h4,.h5,.h6{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-weight:500;line-height:1.1;color:inherit}h1 small,h2 small,h3 small,h4 small,h5 small,h6 small,.h1 small,.h2 small,.h3 small,.h4 small,.h5 small,.h6 small,h1 .small,h2 .small,h3 .small,h4 .small,h5 .small,h6 .small,.h1 .small,.h2 .small,.h3 .small,.h4 .small,.h5 .small,.h6 .small{font-weight:normal;line-height:1;color:#999}h1,h2,h3{margin-top:20px;margin-bottom:10px}h1 small,h2 small,h3 small,h1 .small,h2 .small,h3 .small{font-size:65%}h4,h5,h6{margin-top:10px;margin-bottom:10px}h4 small,h5 small,h6 small,h4 .small,h5 .small,h6 .small{font-size:75%}h1,.h1{font-size:36px}h2,.h2{font-size:30px}h3,.h3{font-size:24px}h4,.h4{font-size:18px}h5,.h5{font-size:14px}h6,.h6{font-size:12px}p{margin:0 0 10px}.lead{margin-bottom:20px;font-size:16px;font-weight:200;line-height:1.4}@media(min-width:768px){.lead{font-size:21px}}small,.small{font-size:85%}cite{font-style:normal}.text-muted{color:#999}.text-primary{color:#428bca}.text-primary:hover{color:#3071a9}.text-warning{color:#8a6d3b}.text-warning:hover{color:#66512c}.text-danger{color:#a94442}.text-danger:hover{color:#843534}.text-success{color:#3c763d}.text-success:hover{color:#2b542c}.text-info{color:#31708f}.text-info:hover{color:#245269}.text-left{text-align:left}.text-right{text-align:right}.text-center{text-align:center}.page-header{padding-bottom:9px;margin:40px 0 20px;border-bottom:1px solid #eee}ul,ol{margin-top:0;margin-bottom:10px}ul ul,ol ul,ul ol,ol 
ol{margin-bottom:0}.list-unstyled{padding-left:0;list-style:none}.list-inline{padding-left:0;list-style:none}.list-inline>li{display:inline-block;padding-right:5px;padding-left:5px}.list-inline>li:first-child{padding-left:0}dl{margin-top:0;margin-bottom:20px}dt,dd{line-height:1.428571429}dt{font-weight:bold}dd{margin-left:0}@media(min-width:768px){.dl-horizontal dt{float:left;width:160px;overflow:hidden;clear:left;text-align:right;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:180px}.dl-horizontal dd:before,.dl-horizontal dd:after{display:table;content:" "}.dl-horizontal dd:after{clear:both}.dl-horizontal dd:before,.dl-horizontal dd:after{display:table;content:" "}.dl-horizontal dd:after{clear:both}}abbr[title],abbr[data-original-title]{cursor:help;border-bottom:1px dotted #999}.initialism{font-size:90%;text-transform:uppercase}blockquote{padding:10px 20px;margin:0 0 20px;border-left:5px solid #eee}blockquote p{font-size:17.5px;font-weight:300;line-height:1.25}blockquote p:last-child{margin-bottom:0}blockquote small,blockquote .small{display:block;line-height:1.428571429;color:#999}blockquote small:before,blockquote .small:before{content:'\2014 \00A0'}blockquote.pull-right{padding-right:15px;padding-left:0;border-right:5px solid #eee;border-left:0}blockquote.pull-right p,blockquote.pull-right small,blockquote.pull-right .small{text-align:right}blockquote.pull-right small:before,blockquote.pull-right .small:before{content:''}blockquote.pull-right small:after,blockquote.pull-right .small:after{content:'\00A0 \2014'}blockquote:before,blockquote:after{content:""}address{margin-bottom:20px;font-style:normal;line-height:1.428571429}code,kbd,pre,samp{font-family:Menlo,Monaco,Consolas,"Courier New",monospace}code{padding:2px 4px;font-size:90%;color:#c7254e;white-space:nowrap;background-color:#f9f2f4;border-radius:4px}pre{display:block;padding:9.5px;margin:0 0 
10px;font-size:13px;line-height:1.428571429;color:#333;word-break:break-all;word-wrap:break-word;background-color:#f5f5f5;border:1px solid #ccc;border-radius:4px}pre code{padding:0;font-size:inherit;color:inherit;white-space:pre-wrap;background-color:transparent;border-radius:0}.pre-scrollable{max-height:340px;overflow-y:scroll}.container{padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}.container:before,.container:after{display:table;content:" "}.container:after{clear:both}.container:before,.container:after{display:table;content:" "}.container:after{clear:both}@media(min-width:768px){.container{width:750px}}@media(min-width:992px){.container{width:970px}}@media(min-width:1200px){.container{width:1170px}}.row{margin-right:-15px;margin-left:-15px}.row:before,.row:after{display:table;content:" "}.row:after{clear:both}.row:before,.row:after{display:table;content:" "}.row:after{clear:both}.col-xs-1,.col-sm-1,.col-md-1,.col-lg-1,.col-xs-2,.col-sm-2,.col-md-2,.col-lg-2,.col-xs-3,.col-sm-3,.col-md-3,.col-lg-3,.col-xs-4,.col-sm-4,.col-md-4,.col-lg-4,.col-xs-5,.col-sm-5,.col-md-5,.col-lg-5,.col-xs-6,.col-sm-6,.col-md-6,.col-lg-6,.col-xs-7,.col-sm-7,.col-md-7,.col-lg-7,.col-xs-8,.col-sm-8,.col-md-8,.col-lg-8,.col-xs-9,.col-sm-9,.col-md-9,.col-lg-9,.col-xs-10,.col-sm-10,.col-md-10,.col-lg-10,.col-xs-11,.col-sm-11,.col-md-11,.col-lg-11,.col-xs-12,.col-sm-12,.col-md-12,.col-lg-12{position:relative;min-height:1px;padding-right:15px;padding-left:15px}.col-xs-1,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9,.col-xs-10,.col-xs-11,.col-xs-12{float:left}.col-xs-12{width:100%}.col-xs-11{width:91.66666666666666%}.col-xs-10{width:83.33333333333334%}.col-xs-9{width:75%}.col-xs-8{width:66.66666666666666%}.col-xs-7{width:58.333333333333336%}.col-xs-6{width:50%}.col-xs-5{width:41.66666666666667%}.col-xs-4{width:33.33333333333333%}.col-xs-3{width:25%}.col-xs-2{width:16.666666666666664%}.col-xs-1{width:8.333333333333332%}.col-xs-pull-12{r
ight:100%}.col-xs-pull-11{right:91.66666666666666%}.col-xs-pull-10{right:83.33333333333334%}.col-xs-pull-9{right:75%}.col-xs-pull-8{right:66.66666666666666%}.col-xs-pull-7{right:58.333333333333336%}.col-xs-pull-6{right:50%}.col-xs-pull-5{right:41.66666666666667%}.col-xs-pull-4{right:33.33333333333333%}.col-xs-pull-3{right:25%}.col-xs-pull-2{right:16.666666666666664%}.col-xs-pull-1{right:8.333333333333332%}.col-xs-pull-0{right:0}.col-xs-push-12{left:100%}.col-xs-push-11{left:91.66666666666666%}.col-xs-push-10{left:83.33333333333334%}.col-xs-push-9{left:75%}.col-xs-push-8{left:66.66666666666666%}.col-xs-push-7{left:58.333333333333336%}.col-xs-push-6{left:50%}.col-xs-push-5{left:41.66666666666667%}.col-xs-push-4{left:33.33333333333333%}.col-xs-push-3{left:25%}.col-xs-push-2{left:16.666666666666664%}.col-xs-push-1{left:8.333333333333332%}.col-xs-push-0{left:0}.col-xs-offset-12{margin-left:100%}.col-xs-offset-11{margin-left:91.66666666666666%}.col-xs-offset-10{margin-left:83.33333333333334%}.col-xs-offset-9{margin-left:75%}.col-xs-offset-8{margin-left:66.66666666666666%}.col-xs-offset-7{margin-left:58.333333333333336%}.col-xs-offset-6{margin-left:50%}.col-xs-offset-5{margin-left:41.66666666666667%}.col-xs-offset-4{margin-left:33.33333333333333%}.col-xs-offset-3{margin-left:25%}.col-xs-offset-2{margin-left:16.666666666666664%}.col-xs-offset-1{margin-left:8.333333333333332%}.col-xs-offset-0{margin-left:0}@media(min-width:768px){.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12{float:left}.col-sm-12{width:100%}.col-sm-11{width:91.66666666666666%}.col-sm-10{width:83.33333333333334%}.col-sm-9{width:75%}.col-sm-8{width:66.66666666666666%}.col-sm-7{width:58.333333333333336%}.col-sm-6{width:50%}.col-sm-5{width:41.66666666666667%}.col-sm-4{width:33.33333333333333%}.col-sm-3{width:25%}.col-sm-2{width:16.666666666666664%}.col-sm-1{width:8.333333333333332%}.col-sm-pull-12{right:100%}.col-sm-pull-11{right:91.666
66666666666%}.col-sm-pull-10{right:83.33333333333334%}.col-sm-pull-9{right:75%}.col-sm-pull-8{right:66.66666666666666%}.col-sm-pull-7{right:58.333333333333336%}.col-sm-pull-6{right:50%}.col-sm-pull-5{right:41.66666666666667%}.col-sm-pull-4{right:33.33333333333333%}.col-sm-pull-3{right:25%}.col-sm-pull-2{right:16.666666666666664%}.col-sm-pull-1{right:8.333333333333332%}.col-sm-pull-0{right:0}.col-sm-push-12{left:100%}.col-sm-push-11{left:91.66666666666666%}.col-sm-push-10{left:83.33333333333334%}.col-sm-push-9{left:75%}.col-sm-push-8{left:66.66666666666666%}.col-sm-push-7{left:58.333333333333336%}.col-sm-push-6{left:50%}.col-sm-push-5{left:41.66666666666667%}.col-sm-push-4{left:33.33333333333333%}.col-sm-push-3{left:25%}.col-sm-push-2{left:16.666666666666664%}.col-sm-push-1{left:8.333333333333332%}.col-sm-push-0{left:0}.col-sm-offset-12{margin-left:100%}.col-sm-offset-11{margin-left:91.66666666666666%}.col-sm-offset-10{margin-left:83.33333333333334%}.col-sm-offset-9{margin-left:75%}.col-sm-offset-8{margin-left:66.66666666666666%}.col-sm-offset-7{margin-left:58.333333333333336%}.col-sm-offset-6{margin-left:50%}.col-sm-offset-5{margin-left:41.66666666666667%}.col-sm-offset-4{margin-left:33.33333333333333%}.col-sm-offset-3{margin-left:25%}.col-sm-offset-2{margin-left:16.666666666666664%}.col-sm-offset-1{margin-left:8.333333333333332%}.col-sm-offset-0{margin-left:0}}@media(min-width:992px){.col-md-1,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9,.col-md-10,.col-md-11,.col-md-12{float:left}.col-md-12{width:100%}.col-md-11{width:91.66666666666666%}.col-md-10{width:83.33333333333334%}.col-md-9{width:75%}.col-md-8{width:66.66666666666666%}.col-md-7{width:58.333333333333336%}.col-md-6{width:50%}.col-md-5{width:41.66666666666667%}.col-md-4{width:33.33333333333333%}.col-md-3{width:25%}.col-md-2{width:16.666666666666664%}.col-md-1{width:8.333333333333332%}.col-md-pull-12{right:100%}.col-md-pull-11{right:91.66666666666666%}.col-md-pull-10{right:83
.33333333333334%}.col-md-pull-9{right:75%}.col-md-pull-8{right:66.66666666666666%}.col-md-pull-7{right:58.333333333333336%}.col-md-pull-6{right:50%}.col-md-pull-5{right:41.66666666666667%}.col-md-pull-4{right:33.33333333333333%}.col-md-pull-3{right:25%}.col-md-pull-2{right:16.666666666666664%}.col-md-pull-1{right:8.333333333333332%}.col-md-pull-0{right:0}.col-md-push-12{left:100%}.col-md-push-11{left:91.66666666666666%}.col-md-push-10{left:83.33333333333334%}.col-md-push-9{left:75%}.col-md-push-8{left:66.66666666666666%}.col-md-push-7{left:58.333333333333336%}.col-md-push-6{left:50%}.col-md-push-5{left:41.66666666666667%}.col-md-push-4{left:33.33333333333333%}.col-md-push-3{left:25%}.col-md-push-2{left:16.666666666666664%}.col-md-push-1{left:8.333333333333332%}.col-md-push-0{left:0}.col-md-offset-12{margin-left:100%}.col-md-offset-11{margin-left:91.66666666666666%}.col-md-offset-10{margin-left:83.33333333333334%}.col-md-offset-9{margin-left:75%}.col-md-offset-8{margin-left:66.66666666666666%}.col-md-offset-7{margin-left:58.333333333333336%}.col-md-offset-6{margin-left:50%}.col-md-offset-5{margin-left:41.66666666666667%}.col-md-offset-4{margin-left:33.33333333333333%}.col-md-offset-3{margin-left:25%}.col-md-offset-2{margin-left:16.666666666666664%}.col-md-offset-1{margin-left:8.333333333333332%}.col-md-offset-0{margin-left:0}}@media(min-width:1200px){.col-lg-1,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9,.col-lg-10,.col-lg-11,.col-lg-12{float:left}.col-lg-12{width:100%}.col-lg-11{width:91.66666666666666%}.col-lg-10{width:83.33333333333334%}.col-lg-9{width:75%}.col-lg-8{width:66.66666666666666%}.col-lg-7{width:58.333333333333336%}.col-lg-6{width:50%}.col-lg-5{width:41.66666666666667%}.col-lg-4{width:33.33333333333333%}.col-lg-3{width:25%}.col-lg-2{width:16.666666666666664%}.col-lg-1{width:8.333333333333332%}.col-lg-pull-12{right:100%}.col-lg-pull-11{right:91.66666666666666%}.col-lg-pull-10{right:83.33333333333334%}.col-lg-pull-9{righ
t:75%}.col-lg-pull-8{right:66.66666666666666%}.col-lg-pull-7{right:58.333333333333336%}.col-lg-pull-6{right:50%}.col-lg-pull-5{right:41.66666666666667%}.col-lg-pull-4{right:33.33333333333333%}.col-lg-pull-3{right:25%}.col-lg-pull-2{right:16.666666666666664%}.col-lg-pull-1{right:8.333333333333332%}.col-lg-pull-0{right:0}.col-lg-push-12{left:100%}.col-lg-push-11{left:91.66666666666666%}.col-lg-push-10{left:83.33333333333334%}.col-lg-push-9{left:75%}.col-lg-push-8{left:66.66666666666666%}.col-lg-push-7{left:58.333333333333336%}.col-lg-push-6{left:50%}.col-lg-push-5{left:41.66666666666667%}.col-lg-push-4{left:33.33333333333333%}.col-lg-push-3{left:25%}.col-lg-push-2{left:16.666666666666664%}.col-lg-push-1{left:8.333333333333332%}.col-lg-push-0{left:0}.col-lg-offset-12{margin-left:100%}.col-lg-offset-11{margin-left:91.66666666666666%}.col-lg-offset-10{margin-left:83.33333333333334%}.col-lg-offset-9{margin-left:75%}.col-lg-offset-8{margin-left:66.66666666666666%}.col-lg-offset-7{margin-left:58.333333333333336%}.col-lg-offset-6{margin-left:50%}.col-lg-offset-5{margin-left:41.66666666666667%}.col-lg-offset-4{margin-left:33.33333333333333%}.col-lg-offset-3{margin-left:25%}.col-lg-offset-2{margin-left:16.666666666666664%}.col-lg-offset-1{margin-left:8.333333333333332%}.col-lg-offset-0{margin-left:0}}table{max-width:100%;background-color:transparent}th{text-align:left}.table{width:100%;margin-bottom:20px}.table>thead>tr>th,.table>tbody>tr>th,.table>tfoot>tr>th,.table>thead>tr>td,.table>tbody>tr>td,.table>tfoot>tr>td{padding:8px;line-height:1.428571429;vertical-align:top;border-top:1px solid #ddd}.table>thead>tr>th{vertical-align:bottom;border-bottom:2px solid #ddd}.table>caption+thead>tr:first-child>th,.table>colgroup+thead>tr:first-child>th,.table>thead:first-child>tr:first-child>th,.table>caption+thead>tr:first-child>td,.table>colgroup+thead>tr:first-child>td,.table>thead:first-child>tr:first-child>td{border-top:0}.table>tbody+tbody{border-top:2px solid #ddd}.table 
.table{background-color:#fff}.table-condensed>thead>tr>th,.table-condensed>tbody>tr>th,.table-condensed>tfoot>tr>th,.table-condensed>thead>tr>td,.table-condensed>tbody>tr>td,.table-condensed>tfoot>tr>td{padding:5px}.table-bordered{border:1px solid #ddd}.table-bordered>thead>tr>th,.table-bordered>tbody>tr>th,.table-bordered>tfoot>tr>th,.table-bordered>thead>tr>td,.table-bordered>tbody>tr>td,.table-bordered>tfoot>tr>td{border:1px solid #ddd}.table-bordered>thead>tr>th,.table-bordered>thead>tr>td{border-bottom-width:2px}.table-striped>tbody>tr:nth-child(odd)>td,.table-striped>tbody>tr:nth-child(odd)>th{background-color:#f9f9f9}.table-hover>tbody>tr:hover>td,.table-hover>tbody>tr:hover>th{background-color:#f5f5f5}table col[class*="col-"]{position:static;display:table-column;float:none}table td[class*="col-"],table th[class*="col-"]{display:table-cell;float:none}.table>thead>tr>.active,.table>tbody>tr>.active,.table>tfoot>tr>.active,.table>thead>.active>td,.table>tbody>.active>td,.table>tfoot>.active>td,.table>thead>.active>th,.table>tbody>.active>th,.table>tfoot>.active>th{background-color:#f5f5f5}.table-hover>tbody>tr>.active:hover,.table-hover>tbody>.active:hover>td,.table-hover>tbody>.active:hover>th{background-color:#e8e8e8}.table>thead>tr>.success,.table>tbody>tr>.success,.table>tfoot>tr>.success,.table>thead>.success>td,.table>tbody>.success>td,.table>tfoot>.success>td,.table>thead>.success>th,.table>tbody>.success>th,.table>tfoot>.success>th{background-color:#dff0d8}.table-hover>tbody>tr>.success:hover,.table-hover>tbody>.success:hover>td,.table-hover>tbody>.success:hover>th{background-color:#d0e9c6}.table>thead>tr>.danger,.table>tbody>tr>.danger,.table>tfoot>tr>.danger,.table>thead>.danger>td,.table>tbody>.danger>td,.table>tfoot>.danger>td,.table>thead>.danger>th,.table>tbody>.danger>th,.table>tfoot>.danger>th{background-color:#f2dede}.table-hover>tbody>tr>.danger:hover,.table-hover>tbody>.danger:hover>td,.table-hover>tbody>.danger:hover>th{background-color:#ebc
ccc}.table>thead>tr>.warning,.table>tbody>tr>.warning,.table>tfoot>tr>.warning,.table>thead>.warning>td,.table>tbody>.warning>td,.table>tfoot>.warning>td,.table>thead>.warning>th,.table>tbody>.warning>th,.table>tfoot>.warning>th{background-color:#fcf8e3}.table-hover>tbody>tr>.warning:hover,.table-hover>tbody>.warning:hover>td,.table-hover>tbody>.warning:hover>th{background-color:#faf2cc}@media(max-width:767px){.table-responsive{width:100%;margin-bottom:15px;overflow-x:scroll;overflow-y:hidden;border:1px solid #ddd;-ms-overflow-style:-ms-autohiding-scrollbar;-webkit-overflow-scrolling:touch}.table-responsive>.table{margin-bottom:0}.table-responsive>.table>thead>tr>th,.table-responsive>.table>tbody>tr>th,.table-responsive>.table>tfoot>tr>th,.table-responsive>.table>thead>tr>td,.table-responsive>.table>tbody>tr>td,.table-responsive>.table>tfoot>tr>td{white-space:nowrap}.table-responsive>.table-bordered{border:0}.table-responsive>.table-bordered>thead>tr>th:first-child,.table-responsive>.table-bordered>tbody>tr>th:first-child,.table-responsive>.table-bordered>tfoot>tr>th:first-child,.table-responsive>.table-bordered>thead>tr>td:first-child,.table-responsive>.table-bordered>tbody>tr>td:first-child,.table-responsive>.table-bordered>tfoot>tr>td:first-child{border-left:0}.table-responsive>.table-bordered>thead>tr>th:last-child,.table-responsive>.table-bordered>tbody>tr>th:last-child,.table-responsive>.table-bordered>tfoot>tr>th:last-child,.table-responsive>.table-bordered>thead>tr>td:last-child,.table-responsive>.table-bordered>tbody>tr>td:last-child,.table-responsive>.table-bordered>tfoot>tr>td:last-child{border-right:0}.table-responsive>.table-bordered>tbody>tr:last-child>th,.table-responsive>.table-bordered>tfoot>tr:last-child>th,.table-responsive>.table-bordered>tbody>tr:last-child>td,.table-responsive>.table-bordered>tfoot>tr:last-child>td{border-bottom:0}}fieldset{padding:0;margin:0;border:0}legend{display:block;width:100%;padding:0;margin-bottom:20px;font-size:21px;l
ine-height:inherit;color:#333;border:0;border-bottom:1px solid #e5e5e5}label{display:inline-block;margin-bottom:5px;font-weight:bold}input[type="search"]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type="radio"],input[type="checkbox"]{margin:4px 0 0;margin-top:1px \9;line-height:normal}input[type="file"]{display:block}select[multiple],select[size]{height:auto}select optgroup{font-family:inherit;font-size:inherit;font-style:inherit}input[type="file"]:focus,input[type="radio"]:focus,input[type="checkbox"]:focus{outline:thin dotted;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}input[type="number"]::-webkit-outer-spin-button,input[type="number"]::-webkit-inner-spin-button{height:auto}output{display:block;padding-top:7px;font-size:14px;line-height:1.428571429;color:#555;vertical-align:middle}.form-control{display:block;width:100%;height:34px;padding:6px 12px;font-size:14px;line-height:1.428571429;color:#555;vertical-align:middle;background-color:#fff;background-image:none;border:1px solid #ccc;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-webkit-transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s;transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s}.form-control:focus{border-color:#66afe9;outline:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 8px rgba(102,175,233,0.6);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 8px rgba(102,175,233,0.6)}.form-control:-moz-placeholder{color:#999}.form-control::-moz-placeholder{color:#999;opacity:1}.form-control:-ms-input-placeholder{color:#999}.form-control::-webkit-input-placeholder{color:#999}.form-control[disabled],.form-control[readonly],fieldset[disabled] 
.form-control{cursor:not-allowed;background-color:#eee}textarea.form-control{height:auto}.form-group{margin-bottom:15px}.radio,.checkbox{display:block;min-height:20px;padding-left:20px;margin-top:10px;margin-bottom:10px;vertical-align:middle}.radio label,.checkbox label{display:inline;margin-bottom:0;font-weight:normal;cursor:pointer}.radio input[type="radio"],.radio-inline input[type="radio"],.checkbox input[type="checkbox"],.checkbox-inline input[type="checkbox"]{float:left;margin-left:-20px}.radio+.radio,.checkbox+.checkbox{margin-top:-5px}.radio-inline,.checkbox-inline{display:inline-block;padding-left:20px;margin-bottom:0;font-weight:normal;vertical-align:middle;cursor:pointer}.radio-inline+.radio-inline,.checkbox-inline+.checkbox-inline{margin-top:0;margin-left:10px}input[type="radio"][disabled],input[type="checkbox"][disabled],.radio[disabled],.radio-inline[disabled],.checkbox[disabled],.checkbox-inline[disabled],fieldset[disabled] input[type="radio"],fieldset[disabled] input[type="checkbox"],fieldset[disabled] .radio,fieldset[disabled] .radio-inline,fieldset[disabled] .checkbox,fieldset[disabled] .checkbox-inline{cursor:not-allowed}.input-sm{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-sm{height:30px;line-height:30px}textarea.input-sm{height:auto}.input-lg{height:46px;padding:10px 16px;font-size:18px;line-height:1.33;border-radius:6px}select.input-lg{height:46px;line-height:46px}textarea.input-lg{height:auto}.has-warning .help-block,.has-warning .control-label,.has-warning .radio,.has-warning .checkbox,.has-warning .radio-inline,.has-warning .checkbox-inline{color:#8a6d3b}.has-warning .form-control{border-color:#8a6d3b;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.has-warning .form-control:focus{border-color:#66512c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #c0a16b;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #c0a16b}.has-warning 
.input-group-addon{color:#8a6d3b;background-color:#fcf8e3;border-color:#8a6d3b}.has-error .help-block,.has-error .control-label,.has-error .radio,.has-error .checkbox,.has-error .radio-inline,.has-error .checkbox-inline{color:#a94442}.has-error .form-control{border-color:#a94442;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.has-error .form-control:focus{border-color:#843534;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #ce8483;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #ce8483}.has-error .input-group-addon{color:#a94442;background-color:#f2dede;border-color:#a94442}.has-success .help-block,.has-success .control-label,.has-success .radio,.has-success .checkbox,.has-success .radio-inline,.has-success .checkbox-inline{color:#3c763d}.has-success .form-control{border-color:#3c763d;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.has-success .form-control:focus{border-color:#2b542c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #67b168;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #67b168}.has-success .input-group-addon{color:#3c763d;background-color:#dff0d8;border-color:#3c763d}.form-control-static{margin-bottom:0}.help-block{display:block;margin-top:5px;margin-bottom:10px;color:#737373}@media(min-width:768px){.form-inline .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.form-inline .form-control{display:inline-block}.form-inline select.form-control{width:auto}.form-inline .radio,.form-inline .checkbox{display:inline-block;padding-left:0;margin-top:0;margin-bottom:0}.form-inline .radio input[type="radio"],.form-inline .checkbox input[type="checkbox"]{float:none;margin-left:0}}.form-horizontal .control-label,.form-horizontal .radio,.form-horizontal .checkbox,.form-horizontal .radio-inline,.form-horizontal .checkbox-inline{padding-top:7px;margin-top:0;margin-bottom:0}.form-horizontal 
.radio,.form-horizontal .checkbox{min-height:27px}.form-horizontal .form-group{margin-right:-15px;margin-left:-15px}.form-horizontal .form-group:before,.form-horizontal .form-group:after{display:table;content:" "}.form-horizontal .form-group:after{clear:both}.form-horizontal .form-group:before,.form-horizontal .form-group:after{display:table;content:" "}.form-horizontal .form-group:after{clear:both}.form-horizontal .form-control-static{padding-top:7px}@media(min-width:768px){.form-horizontal .control-label{text-align:right}}.btn{display:inline-block;padding:6px 12px;margin-bottom:0;font-size:14px;font-weight:normal;line-height:1.428571429;text-align:center;white-space:nowrap;vertical-align:middle;cursor:pointer;background-image:none;border:1px solid transparent;border-radius:4px;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;-o-user-select:none;user-select:none}.btn:focus{outline:thin dotted;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}.btn:hover,.btn:focus{color:#333;text-decoration:none}.btn:active,.btn.active{background-image:none;outline:0;-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,0.125);box-shadow:inset 0 3px 5px rgba(0,0,0,0.125)}.btn.disabled,.btn[disabled],fieldset[disabled] .btn{pointer-events:none;cursor:not-allowed;opacity:.65;filter:alpha(opacity=65);-webkit-box-shadow:none;box-shadow:none}.btn-default{color:#333;background-color:#fff;border-color:#ccc}.btn-default:hover,.btn-default:focus,.btn-default:active,.btn-default.active,.open .dropdown-toggle.btn-default{color:#333;background-color:#ebebeb;border-color:#adadad}.btn-default:active,.btn-default.active,.open .dropdown-toggle.btn-default{background-image:none}.btn-default.disabled,.btn-default[disabled],fieldset[disabled] .btn-default,.btn-default.disabled:hover,.btn-default[disabled]:hover,fieldset[disabled] .btn-default:hover,.btn-default.disabled:focus,.btn-default[disabled]:focus,fieldset[disabled] 
.btn-default:focus,.btn-default.disabled:active,.btn-default[disabled]:active,fieldset[disabled] .btn-default:active,.btn-default.disabled.active,.btn-default[disabled].active,fieldset[disabled] .btn-default.active{background-color:#fff;border-color:#ccc}.btn-default .badge{color:#fff;background-color:#fff}.btn-primary{color:#fff;background-color:#428bca;border-color:#357ebd}.btn-primary:hover,.btn-primary:focus,.btn-primary:active,.btn-primary.active,.open .dropdown-toggle.btn-primary{color:#fff;background-color:#3276b1;border-color:#285e8e}.btn-primary:active,.btn-primary.active,.open .dropdown-toggle.btn-primary{background-image:none}.btn-primary.disabled,.btn-primary[disabled],fieldset[disabled] .btn-primary,.btn-primary.disabled:hover,.btn-primary[disabled]:hover,fieldset[disabled] .btn-primary:hover,.btn-primary.disabled:focus,.btn-primary[disabled]:focus,fieldset[disabled] .btn-primary:focus,.btn-primary.disabled:active,.btn-primary[disabled]:active,fieldset[disabled] .btn-primary:active,.btn-primary.disabled.active,.btn-primary[disabled].active,fieldset[disabled] .btn-primary.active{background-color:#428bca;border-color:#357ebd}.btn-primary .badge{color:#428bca;background-color:#fff}.btn-warning{color:#fff;background-color:#f0ad4e;border-color:#eea236}.btn-warning:hover,.btn-warning:focus,.btn-warning:active,.btn-warning.active,.open .dropdown-toggle.btn-warning{color:#fff;background-color:#ed9c28;border-color:#d58512}.btn-warning:active,.btn-warning.active,.open .dropdown-toggle.btn-warning{background-image:none}.btn-warning.disabled,.btn-warning[disabled],fieldset[disabled] .btn-warning,.btn-warning.disabled:hover,.btn-warning[disabled]:hover,fieldset[disabled] .btn-warning:hover,.btn-warning.disabled:focus,.btn-warning[disabled]:focus,fieldset[disabled] .btn-warning:focus,.btn-warning.disabled:active,.btn-warning[disabled]:active,fieldset[disabled] .btn-warning:active,.btn-warning.disabled.active,.btn-warning[disabled].active,fieldset[disabled] 
.btn-warning.active{background-color:#f0ad4e;border-color:#eea236}.btn-warning .badge{color:#f0ad4e;background-color:#fff}.btn-danger{color:#fff;background-color:#d9534f;border-color:#d43f3a}.btn-danger:hover,.btn-danger:focus,.btn-danger:active,.btn-danger.active,.open .dropdown-toggle.btn-danger{color:#fff;background-color:#d2322d;border-color:#ac2925}.btn-danger:active,.btn-danger.active,.open .dropdown-toggle.btn-danger{background-image:none}.btn-danger.disabled,.btn-danger[disabled],fieldset[disabled] .btn-danger,.btn-danger.disabled:hover,.btn-danger[disabled]:hover,fieldset[disabled] .btn-danger:hover,.btn-danger.disabled:focus,.btn-danger[disabled]:focus,fieldset[disabled] .btn-danger:focus,.btn-danger.disabled:active,.btn-danger[disabled]:active,fieldset[disabled] .btn-danger:active,.btn-danger.disabled.active,.btn-danger[disabled].active,fieldset[disabled] .btn-danger.active{background-color:#d9534f;border-color:#d43f3a}.btn-danger .badge{color:#d9534f;background-color:#fff}.btn-success{color:#fff;background-color:#5cb85c;border-color:#4cae4c}.btn-success:hover,.btn-success:focus,.btn-success:active,.btn-success.active,.open .dropdown-toggle.btn-success{color:#fff;background-color:#47a447;border-color:#398439}.btn-success:active,.btn-success.active,.open .dropdown-toggle.btn-success{background-image:none}.btn-success.disabled,.btn-success[disabled],fieldset[disabled] .btn-success,.btn-success.disabled:hover,.btn-success[disabled]:hover,fieldset[disabled] .btn-success:hover,.btn-success.disabled:focus,.btn-success[disabled]:focus,fieldset[disabled] .btn-success:focus,.btn-success.disabled:active,.btn-success[disabled]:active,fieldset[disabled] .btn-success:active,.btn-success.disabled.active,.btn-success[disabled].active,fieldset[disabled] .btn-success.active{background-color:#5cb85c;border-color:#4cae4c}.btn-success 
.badge{color:#5cb85c;background-color:#fff}.btn-info{color:#fff;background-color:#5bc0de;border-color:#46b8da}.btn-info:hover,.btn-info:focus,.btn-info:active,.btn-info.active,.open .dropdown-toggle.btn-info{color:#fff;background-color:#39b3d7;border-color:#269abc}.btn-info:active,.btn-info.active,.open .dropdown-toggle.btn-info{background-image:none}.btn-info.disabled,.btn-info[disabled],fieldset[disabled] .btn-info,.btn-info.disabled:hover,.btn-info[disabled]:hover,fieldset[disabled] .btn-info:hover,.btn-info.disabled:focus,.btn-info[disabled]:focus,fieldset[disabled] .btn-info:focus,.btn-info.disabled:active,.btn-info[disabled]:active,fieldset[disabled] .btn-info:active,.btn-info.disabled.active,.btn-info[disabled].active,fieldset[disabled] .btn-info.active{background-color:#5bc0de;border-color:#46b8da}.btn-info .badge{color:#5bc0de;background-color:#fff}.btn-link{font-weight:normal;color:#428bca;cursor:pointer;border-radius:0}.btn-link,.btn-link:active,.btn-link[disabled],fieldset[disabled] .btn-link{background-color:transparent;-webkit-box-shadow:none;box-shadow:none}.btn-link,.btn-link:hover,.btn-link:focus,.btn-link:active{border-color:transparent}.btn-link:hover,.btn-link:focus{color:#2a6496;text-decoration:underline;background-color:transparent}.btn-link[disabled]:hover,fieldset[disabled] .btn-link:hover,.btn-link[disabled]:focus,fieldset[disabled] .btn-link:focus{color:#999;text-decoration:none}.btn-lg{padding:10px 16px;font-size:18px;line-height:1.33;border-radius:6px}.btn-sm{padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.btn-xs{padding:1px 5px;font-size:12px;line-height:1.5;border-radius:3px}.btn-block{display:block;width:100%;padding-right:0;padding-left:0}.btn-block+.btn-block{margin-top:5px}input[type="submit"].btn-block,input[type="reset"].btn-block,input[type="button"].btn-block{width:100%}.fade{opacity:0;-webkit-transition:opacity .15s linear;transition:opacity .15s 
linear}.fade.in{opacity:1}.collapse{display:none}.collapse.in{display:block}.collapsing{position:relative;height:0;overflow:hidden;-webkit-transition:height .35s ease;transition:height .35s ease}@font-face{font-family:'Glyphicons Halflings';src:url('../fonts/glyphicons-halflings-regular.eot');src:url('../fonts/glyphicons-halflings-regular.eot?#iefix') format('embedded-opentype'),url('../fonts/glyphicons-halflings-regular.woff') format('woff'),url('../fonts/glyphicons-halflings-regular.ttf') format('truetype'),url('../fonts/glyphicons-halflings-regular.svg#glyphicons-halflingsregular') format('svg')}.glyphicon{position:relative;top:1px;display:inline-block;font-family:'Glyphicons Halflings';-webkit-font-smoothing:antialiased;font-style:normal;font-weight:normal;line-height:1;-moz-osx-font-smoothing:grayscale}.glyphicon:empty{width:1em}.glyphicon-asterisk:before{content:"\2a"}.glyphicon-plus:before{content:"\2b"}.glyphicon-euro:before{content:"\20ac"}.glyphicon-minus:before{content:"\2212"}.glyphicon-cloud:before{content:"\2601"}.glyphicon-envelope:before{content:"\2709"}.glyphicon-pencil:before{content:"\270f"}.glyphicon-glass:before{content:"\e001"}.glyphicon-music:before{content:"\e002"}.glyphicon-search:before{content:"\e003"}.glyphicon-heart:before{content:"\e005"}.glyphicon-star:before{content:"\e006"}.glyphicon-star-empty:before{content:"\e007"}.glyphicon-user:before{content:"\e008"}.glyphicon-film:before{content:"\e009"}.glyphicon-th-large:before{content:"\e010"}.glyphicon-th:before{content:"\e011"}.glyphicon-th-list:before{content:"\e012"}.glyphicon-ok:before{content:"\e013"}.glyphicon-remove:before{content:"\e014"}.glyphicon-zoom-in:before{content:"\e015"}.glyphicon-zoom-out:before{content:"\e016"}.glyphicon-off:before{content:"\e017"}.glyphicon-signal:before{content:"\e018"}.glyphicon-cog:before{content:"\e019"}.glyphicon-trash:before{content:"\e020"}.glyphicon-home:before{content:"\e021"}.glyphicon-file:before{content:"\e022"}.glyphicon-time:before{content
:"\e023"}.glyphicon-road:before{content:"\e024"}.glyphicon-download-alt:before{content:"\e025"}.glyphicon-download:before{content:"\e026"}.glyphicon-upload:before{content:"\e027"}.glyphicon-inbox:before{content:"\e028"}.glyphicon-play-circle:before{content:"\e029"}.glyphicon-repeat:before{content:"\e030"}.glyphicon-refresh:before{content:"\e031"}.glyphicon-list-alt:before{content:"\e032"}.glyphicon-lock:before{content:"\e033"}.glyphicon-flag:before{content:"\e034"}.glyphicon-headphones:before{content:"\e035"}.glyphicon-volume-off:before{content:"\e036"}.glyphicon-volume-down:before{content:"\e037"}.glyphicon-volume-up:before{content:"\e038"}.glyphicon-qrcode:before{content:"\e039"}.glyphicon-barcode:before{content:"\e040"}.glyphicon-tag:before{content:"\e041"}.glyphicon-tags:before{content:"\e042"}.glyphicon-book:before{content:"\e043"}.glyphicon-bookmark:before{content:"\e044"}.glyphicon-print:before{content:"\e045"}.glyphicon-camera:before{content:"\e046"}.glyphicon-font:before{content:"\e047"}.glyphicon-bold:before{content:"\e048"}.glyphicon-italic:before{content:"\e049"}.glyphicon-text-height:before{content:"\e050"}.glyphicon-text-width:before{content:"\e051"}.glyphicon-align-left:before{content:"\e052"}.glyphicon-align-center:before{content:"\e053"}.glyphicon-align-right:before{content:"\e054"}.glyphicon-align-justify:before{content:"\e055"}.glyphicon-list:before{content:"\e056"}.glyphicon-indent-left:before{content:"\e057"}.glyphicon-indent-right:before{content:"\e058"}.glyphicon-facetime-video:before{content:"\e059"}.glyphicon-picture:before{content:"\e060"}.glyphicon-map-marker:before{content:"\e062"}.glyphicon-adjust:before{content:"\e063"}.glyphicon-tint:before{content:"\e064"}.glyphicon-edit:before{content:"\e065"}.glyphicon-share:before{content:"\e066"}.glyphicon-check:before{content:"\e067"}.glyphicon-move:before{content:"\e068"}.glyphicon-step-backward:before{content:"\e069"}.glyphicon-fast-backward:before{content:"\e070"}.glyphicon-backward:before{con
tent:"\e071"}.glyphicon-play:before{content:"\e072"}.glyphicon-pause:before{content:"\e073"}.glyphicon-stop:before{content:"\e074"}.glyphicon-forward:before{content:"\e075"}.glyphicon-fast-forward:before{content:"\e076"}.glyphicon-step-forward:before{content:"\e077"}.glyphicon-eject:before{content:"\e078"}.glyphicon-chevron-left:before{content:"\e079"}.glyphicon-chevron-right:before{content:"\e080"}.glyphicon-plus-sign:before{content:"\e081"}.glyphicon-minus-sign:before{content:"\e082"}.glyphicon-remove-sign:before{content:"\e083"}.glyphicon-ok-sign:before{content:"\e084"}.glyphicon-question-sign:before{content:"\e085"}.glyphicon-info-sign:before{content:"\e086"}.glyphicon-screenshot:before{content:"\e087"}.glyphicon-remove-circle:before{content:"\e088"}.glyphicon-ok-circle:before{content:"\e089"}.glyphicon-ban-circle:before{content:"\e090"}.glyphicon-arrow-left:before{content:"\e091"}.glyphicon-arrow-right:before{content:"\e092"}.glyphicon-arrow-up:before{content:"\e093"}.glyphicon-arrow-down:before{content:"\e094"}.glyphicon-share-alt:before{content:"\e095"}.glyphicon-resize-full:before{content:"\e096"}.glyphicon-resize-small:before{content:"\e097"}.glyphicon-exclamation-sign:before{content:"\e101"}.glyphicon-gift:before{content:"\e102"}.glyphicon-leaf:before{content:"\e103"}.glyphicon-fire:before{content:"\e104"}.glyphicon-eye-open:before{content:"\e105"}.glyphicon-eye-close:before{content:"\e106"}.glyphicon-warning-sign:before{content:"\e107"}.glyphicon-plane:before{content:"\e108"}.glyphicon-calendar:before{content:"\e109"}.glyphicon-random:before{content:"\e110"}.glyphicon-comment:before{content:"\e111"}.glyphicon-magnet:before{content:"\e112"}.glyphicon-chevron-up:before{content:"\e113"}.glyphicon-chevron-down:before{content:"\e114"}.glyphicon-retweet:before{content:"\e115"}.glyphicon-shopping-cart:before{content:"\e116"}.glyphicon-folder-close:before{content:"\e117"}.glyphicon-folder-open:before{content:"\e118"}.glyphicon-resize-vertical:before{content:"\e11
9"}.glyphicon-resize-horizontal:before{content:"\e120"}.glyphicon-hdd:before{content:"\e121"}.glyphicon-bullhorn:before{content:"\e122"}.glyphicon-bell:before{content:"\e123"}.glyphicon-certificate:before{content:"\e124"}.glyphicon-thumbs-up:before{content:"\e125"}.glyphicon-thumbs-down:before{content:"\e126"}.glyphicon-hand-right:before{content:"\e127"}.glyphicon-hand-left:before{content:"\e128"}.glyphicon-hand-up:before{content:"\e129"}.glyphicon-hand-down:before{content:"\e130"}.glyphicon-circle-arrow-right:before{content:"\e131"}.glyphicon-circle-arrow-left:before{content:"\e132"}.glyphicon-circle-arrow-up:before{content:"\e133"}.glyphicon-circle-arrow-down:before{content:"\e134"}.glyphicon-globe:before{content:"\e135"}.glyphicon-wrench:before{content:"\e136"}.glyphicon-tasks:before{content:"\e137"}.glyphicon-filter:before{content:"\e138"}.glyphicon-briefcase:before{content:"\e139"}.glyphicon-fullscreen:before{content:"\e140"}.glyphicon-dashboard:before{content:"\e141"}.glyphicon-paperclip:before{content:"\e142"}.glyphicon-heart-empty:before{content:"\e143"}.glyphicon-link:before{content:"\e144"}.glyphicon-phone:before{content:"\e145"}.glyphicon-pushpin:before{content:"\e146"}.glyphicon-usd:before{content:"\e148"}.glyphicon-gbp:before{content:"\e149"}.glyphicon-sort:before{content:"\e150"}.glyphicon-sort-by-alphabet:before{content:"\e151"}.glyphicon-sort-by-alphabet-alt:before{content:"\e152"}.glyphicon-sort-by-order:before{content:"\e153"}.glyphicon-sort-by-order-alt:before{content:"\e154"}.glyphicon-sort-by-attributes:before{content:"\e155"}.glyphicon-sort-by-attributes-alt:before{content:"\e156"}.glyphicon-unchecked:before{content:"\e157"}.glyphicon-expand:before{content:"\e158"}.glyphicon-collapse-down:before{content:"\e159"}.glyphicon-collapse-up:before{content:"\e160"}.glyphicon-log-in:before{content:"\e161"}.glyphicon-flash:before{content:"\e162"}.glyphicon-log-out:before{content:"\e163"}.glyphicon-new-window:before{content:"\e164"}.glyphicon-record:befor
e{content:"\e165"}.glyphicon-save:before{content:"\e166"}.glyphicon-open:before{content:"\e167"}.glyphicon-saved:before{content:"\e168"}.glyphicon-import:before{content:"\e169"}.glyphicon-export:before{content:"\e170"}.glyphicon-send:before{content:"\e171"}.glyphicon-floppy-disk:before{content:"\e172"}.glyphicon-floppy-saved:before{content:"\e173"}.glyphicon-floppy-remove:before{content:"\e174"}.glyphicon-floppy-save:before{content:"\e175"}.glyphicon-floppy-open:before{content:"\e176"}.glyphicon-credit-card:before{content:"\e177"}.glyphicon-transfer:before{content:"\e178"}.glyphicon-cutlery:before{content:"\e179"}.glyphicon-header:before{content:"\e180"}.glyphicon-compressed:before{content:"\e181"}.glyphicon-earphone:before{content:"\e182"}.glyphicon-phone-alt:before{content:"\e183"}.glyphicon-tower:before{content:"\e184"}.glyphicon-stats:before{content:"\e185"}.glyphicon-sd-video:before{content:"\e186"}.glyphicon-hd-video:before{content:"\e187"}.glyphicon-subtitles:before{content:"\e188"}.glyphicon-sound-stereo:before{content:"\e189"}.glyphicon-sound-dolby:before{content:"\e190"}.glyphicon-sound-5-1:before{content:"\e191"}.glyphicon-sound-6-1:before{content:"\e192"}.glyphicon-sound-7-1:before{content:"\e193"}.glyphicon-copyright-mark:before{content:"\e194"}.glyphicon-registration-mark:before{content:"\e195"}.glyphicon-cloud-download:before{content:"\e197"}.glyphicon-cloud-upload:before{content:"\e198"}.glyphicon-tree-conifer:before{content:"\e199"}.glyphicon-tree-deciduous:before{content:"\e200"}.caret{display:inline-block;width:0;height:0;margin-left:2px;vertical-align:middle;border-top:4px solid;border-right:4px solid transparent;border-left:4px solid transparent}.dropdown{position:relative}.dropdown-toggle:focus{outline:0}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:160px;padding:5px 0;margin:2px 0 0;font-size:14px;list-style:none;background-color:#fff;border:1px solid #ccc;border:1px solid 
rgba(0,0,0,0.15);border-radius:4px;-webkit-box-shadow:0 6px 12px rgba(0,0,0,0.175);box-shadow:0 6px 12px rgba(0,0,0,0.175);background-clip:padding-box}.dropdown-menu.pull-right{right:0;left:auto}.dropdown-menu .divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.dropdown-menu>li>a{display:block;padding:3px 20px;clear:both;font-weight:normal;line-height:1.428571429;color:#333;white-space:nowrap}.dropdown-menu>li>a:hover,.dropdown-menu>li>a:focus{color:#262626;text-decoration:none;background-color:#f5f5f5}.dropdown-menu>.active>a,.dropdown-menu>.active>a:hover,.dropdown-menu>.active>a:focus{color:#fff;text-decoration:none;background-color:#428bca;outline:0}.dropdown-menu>.disabled>a,.dropdown-menu>.disabled>a:hover,.dropdown-menu>.disabled>a:focus{color:#999}.dropdown-menu>.disabled>a:hover,.dropdown-menu>.disabled>a:focus{text-decoration:none;cursor:not-allowed;background-color:transparent;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.open>.dropdown-menu{display:block}.open>a{outline:0}.dropdown-header{display:block;padding:3px 20px;font-size:12px;line-height:1.428571429;color:#999}.dropdown-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:990}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{border-top:0;border-bottom:4px solid;content:""}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:1px}@media(min-width:768px){.navbar-right 
.dropdown-menu{right:0;left:auto}}.btn-group,.btn-group-vertical{position:relative;display:inline-block;vertical-align:middle}.btn-group>.btn,.btn-group-vertical>.btn{position:relative;float:left}.btn-group>.btn:hover,.btn-group-vertical>.btn:hover,.btn-group>.btn:focus,.btn-group-vertical>.btn:focus,.btn-group>.btn:active,.btn-group-vertical>.btn:active,.btn-group>.btn.active,.btn-group-vertical>.btn.active{z-index:2}.btn-group>.btn:focus,.btn-group-vertical>.btn:focus{outline:0}.btn-group .btn+.btn,.btn-group .btn+.btn-group,.btn-group .btn-group+.btn,.btn-group .btn-group+.btn-group{margin-left:-1px}.btn-toolbar:before,.btn-toolbar:after{display:table;content:" "}.btn-toolbar:after{clear:both}.btn-toolbar:before,.btn-toolbar:after{display:table;content:" "}.btn-toolbar:after{clear:both}.btn-toolbar .btn-group{float:left}.btn-toolbar>.btn+.btn,.btn-toolbar>.btn-group+.btn,.btn-toolbar>.btn+.btn-group,.btn-toolbar>.btn-group+.btn-group{margin-left:5px}.btn-group>.btn:not(:first-child):not(:last-child):not(.dropdown-toggle){border-radius:0}.btn-group>.btn:first-child{margin-left:0}.btn-group>.btn:first-child:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn:last-child:not(:first-child),.btn-group>.dropdown-toggle:not(:first-child){border-bottom-left-radius:0;border-top-left-radius:0}.btn-group>.btn-group{float:left}.btn-group>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group>.btn-group:first-child>.btn:last-child,.btn-group>.btn-group:first-child>.dropdown-toggle{border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn-group:last-child>.btn:first-child{border-bottom-left-radius:0;border-top-left-radius:0}.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0}.btn-group-xs>.btn{padding:1px 5px;font-size:12px;line-height:1.5;border-radius:3px}.btn-group-sm>.btn{padding:5px 
10px;font-size:12px;line-height:1.5;border-radius:3px}.btn-group-lg>.btn{padding:10px 16px;font-size:18px;line-height:1.33;border-radius:6px}.btn-group>.btn+.dropdown-toggle{padding-right:8px;padding-left:8px}.btn-group>.btn-lg+.dropdown-toggle{padding-right:12px;padding-left:12px}.btn-group.open .dropdown-toggle{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,0.125);box-shadow:inset 0 3px 5px rgba(0,0,0,0.125)}.btn-group.open .dropdown-toggle.btn-link{-webkit-box-shadow:none;box-shadow:none}.btn .caret{margin-left:0}.btn-lg .caret{border-width:5px 5px 0;border-bottom-width:0}.dropup .btn-lg .caret{border-width:0 5px 5px}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group,.btn-group-vertical>.btn-group>.btn{display:block;float:none;width:100%;max-width:100%}.btn-group-vertical>.btn-group:before,.btn-group-vertical>.btn-group:after{display:table;content:" "}.btn-group-vertical>.btn-group:after{clear:both}.btn-group-vertical>.btn-group:before,.btn-group-vertical>.btn-group:after{display:table;content:" 
"}.btn-group-vertical>.btn-group:after{clear:both}.btn-group-vertical>.btn-group>.btn{float:none}.btn-group-vertical>.btn+.btn,.btn-group-vertical>.btn+.btn-group,.btn-group-vertical>.btn-group+.btn,.btn-group-vertical>.btn-group+.btn-group{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:not(:first-child):not(:last-child){border-radius:0}.btn-group-vertical>.btn:first-child:not(:last-child){border-top-right-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn:last-child:not(:first-child){border-top-right-radius:0;border-bottom-left-radius:4px;border-top-left-radius:0}.btn-group-vertical>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group-vertical>.btn-group:first-child>.btn:last-child,.btn-group-vertical>.btn-group:first-child>.dropdown-toggle{border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn-group:last-child>.btn:first-child{border-top-right-radius:0;border-top-left-radius:0}.btn-group-justified{display:table;width:100%;border-collapse:separate;table-layout:fixed}.btn-group-justified>.btn,.btn-group-justified>.btn-group{display:table-cell;float:none;width:1%}.btn-group-justified>.btn-group .btn{width:100%}[data-toggle="buttons"]>.btn>input[type="radio"],[data-toggle="buttons"]>.btn>input[type="checkbox"]{display:none}.input-group{position:relative;display:table;border-collapse:separate}.input-group[class*="col-"]{float:none;padding-right:0;padding-left:0}.input-group .form-control{width:100%;margin-bottom:0}.input-group-lg>.form-control,.input-group-lg>.input-group-addon,.input-group-lg>.input-group-btn>.btn{height:46px;padding:10px 
16px;font-size:18px;line-height:1.33;border-radius:6px}select.input-group-lg>.form-control,select.input-group-lg>.input-group-addon,select.input-group-lg>.input-group-btn>.btn{height:46px;line-height:46px}textarea.input-group-lg>.form-control,textarea.input-group-lg>.input-group-addon,textarea.input-group-lg>.input-group-btn>.btn{height:auto}.input-group-sm>.form-control,.input-group-sm>.input-group-addon,.input-group-sm>.input-group-btn>.btn{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-group-sm>.form-control,select.input-group-sm>.input-group-addon,select.input-group-sm>.input-group-btn>.btn{height:30px;line-height:30px}textarea.input-group-sm>.form-control,textarea.input-group-sm>.input-group-addon,textarea.input-group-sm>.input-group-btn>.btn{height:auto}.input-group-addon,.input-group-btn,.input-group .form-control{display:table-cell}.input-group-addon:not(:first-child):not(:last-child),.input-group-btn:not(:first-child):not(:last-child),.input-group .form-control:not(:first-child):not(:last-child){border-radius:0}.input-group-addon,.input-group-btn{width:1%;white-space:nowrap;vertical-align:middle}.input-group-addon{padding:6px 12px;font-size:14px;font-weight:normal;line-height:1;color:#555;text-align:center;background-color:#eee;border:1px solid #ccc;border-radius:4px}.input-group-addon.input-sm{padding:5px 10px;font-size:12px;border-radius:3px}.input-group-addon.input-lg{padding:10px 16px;font-size:18px;border-radius:6px}.input-group-addon input[type="radio"],.input-group-addon input[type="checkbox"]{margin-top:0}.input-group .form-control:first-child,.input-group-addon:first-child,.input-group-btn:first-child>.btn,.input-group-btn:first-child>.dropdown-toggle,.input-group-btn:last-child>.btn:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.input-group-addon:first-child{border-right:0}.input-group 
.form-control:last-child,.input-group-addon:last-child,.input-group-btn:last-child>.btn,.input-group-btn:last-child>.dropdown-toggle,.input-group-btn:first-child>.btn:not(:first-child){border-bottom-left-radius:0;border-top-left-radius:0}.input-group-addon:last-child{border-left:0}.input-group-btn{position:relative;white-space:nowrap}.input-group-btn:first-child>.btn{margin-right:-1px}.input-group-btn:last-child>.btn{margin-left:-1px}.input-group-btn>.btn{position:relative}.input-group-btn>.btn+.btn{margin-left:-4px}.input-group-btn>.btn:hover,.input-group-btn>.btn:active{z-index:2}.nav{padding-left:0;margin-bottom:0;list-style:none}.nav:before,.nav:after{display:table;content:" "}.nav:after{clear:both}.nav:before,.nav:after{display:table;content:" "}.nav:after{clear:both}.nav>li{position:relative;display:block}.nav>li>a{position:relative;display:block;padding:10px 15px}.nav>li>a:hover,.nav>li>a:focus{text-decoration:none;background-color:#eee}.nav>li.disabled>a{color:#999}.nav>li.disabled>a:hover,.nav>li.disabled>a:focus{color:#999;text-decoration:none;cursor:not-allowed;background-color:transparent}.nav .open>a,.nav .open>a:hover,.nav .open>a:focus{background-color:#eee;border-color:#428bca}.nav .nav-divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.nav>li>a>img{max-width:none}.nav-tabs{border-bottom:1px solid #ddd}.nav-tabs>li{float:left;margin-bottom:-1px}.nav-tabs>li>a{margin-right:2px;line-height:1.428571429;border:1px solid transparent;border-radius:4px 4px 0 0}.nav-tabs>li>a:hover{border-color:#eee #eee #ddd}.nav-tabs>li.active>a,.nav-tabs>li.active>a:hover,.nav-tabs>li.active>a:focus{color:#555;cursor:default;background-color:#fff;border:1px solid #ddd;border-bottom-color:transparent}.nav-tabs.nav-justified{width:100%;border-bottom:0}.nav-tabs.nav-justified>li{float:none}.nav-tabs.nav-justified>li>a{margin-bottom:5px;text-align:center}.nav-tabs.nav-justified>.dropdown 
.dropdown-menu{top:auto;left:auto}@media(min-width:768px){.nav-tabs.nav-justified>li{display:table-cell;width:1%}.nav-tabs.nav-justified>li>a{margin-bottom:0}}.nav-tabs.nav-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:hover,.nav-tabs.nav-justified>.active>a:focus{border:1px solid #ddd}@media(min-width:768px){.nav-tabs.nav-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 0}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:hover,.nav-tabs.nav-justified>.active>a:focus{border-bottom-color:#fff}}.nav-pills>li{float:left}.nav-pills>li>a{border-radius:4px}.nav-pills>li+li{margin-left:2px}.nav-pills>li.active>a,.nav-pills>li.active>a:hover,.nav-pills>li.active>a:focus{color:#fff;background-color:#428bca}.nav-stacked>li{float:none}.nav-stacked>li+li{margin-top:2px;margin-left:0}.nav-justified{width:100%}.nav-justified>li{float:none}.nav-justified>li>a{margin-bottom:5px;text-align:center}.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media(min-width:768px){.nav-justified>li{display:table-cell;width:1%}.nav-justified>li>a{margin-bottom:0}}.nav-tabs-justified{border-bottom:0}.nav-tabs-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:hover,.nav-tabs-justified>.active>a:focus{border:1px solid #ddd}@media(min-width:768px){.nav-tabs-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 0}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:hover,.nav-tabs-justified>.active>a:focus{border-bottom-color:#fff}}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.nav-tabs .dropdown-menu{margin-top:-1px;border-top-right-radius:0;border-top-left-radius:0}.navbar{position:relative;min-height:50px;margin-bottom:20px;border:1px solid transparent}.navbar:before,.navbar:after{display:table;content:" 
"}.navbar:after{clear:both}.navbar:before,.navbar:after{display:table;content:" "}.navbar:after{clear:both}@media(min-width:768px){.navbar{border-radius:4px}}.navbar-header:before,.navbar-header:after{display:table;content:" "}.navbar-header:after{clear:both}.navbar-header:before,.navbar-header:after{display:table;content:" "}.navbar-header:after{clear:both}@media(min-width:768px){.navbar-header{float:left}}.navbar-collapse{max-height:340px;padding-right:15px;padding-left:15px;overflow-x:visible;border-top:1px solid transparent;box-shadow:inset 0 1px 0 rgba(255,255,255,0.1);-webkit-overflow-scrolling:touch}.navbar-collapse:before,.navbar-collapse:after{display:table;content:" "}.navbar-collapse:after{clear:both}.navbar-collapse:before,.navbar-collapse:after{display:table;content:" "}.navbar-collapse:after{clear:both}.navbar-collapse.in{overflow-y:auto}@media(min-width:768px){.navbar-collapse{width:auto;border-top:0;box-shadow:none}.navbar-collapse.collapse{display:block!important;height:auto!important;padding-bottom:0;overflow:visible!important}.navbar-collapse.in{overflow-y:visible}.navbar-fixed-top .navbar-collapse,.navbar-static-top .navbar-collapse,.navbar-fixed-bottom .navbar-collapse{padding-right:0;padding-left:0}}.container>.navbar-header,.container>.navbar-collapse{margin-right:-15px;margin-left:-15px}@media(min-width:768px){.container>.navbar-header,.container>.navbar-collapse{margin-right:0;margin-left:0}}.navbar-static-top{z-index:1000;border-width:0 0 1px}@media(min-width:768px){.navbar-static-top{border-radius:0}}.navbar-fixed-top,.navbar-fixed-bottom{position:fixed;right:0;left:0;z-index:1030}@media(min-width:768px){.navbar-fixed-top,.navbar-fixed-bottom{border-radius:0}}.navbar-fixed-top{top:0;border-width:0 0 1px}.navbar-fixed-bottom{bottom:0;margin-bottom:0;border-width:1px 0 0}.navbar-brand{float:left;padding:15px 
15px;font-size:18px;line-height:20px}.navbar-brand:hover,.navbar-brand:focus{text-decoration:none}@media(min-width:768px){.navbar>.container .navbar-brand{margin-left:-15px}}.navbar-toggle{position:relative;float:right;padding:9px 10px;margin-top:8px;margin-right:15px;margin-bottom:8px;background-color:transparent;background-image:none;border:1px solid transparent;border-radius:4px}.navbar-toggle .icon-bar{display:block;width:22px;height:2px;border-radius:1px}.navbar-toggle .icon-bar+.icon-bar{margin-top:4px}@media(min-width:768px){.navbar-toggle{display:none}}.navbar-nav{margin:7.5px -15px}.navbar-nav>li>a{padding-top:10px;padding-bottom:10px;line-height:20px}@media(max-width:767px){.navbar-nav .open .dropdown-menu{position:static;float:none;width:auto;margin-top:0;background-color:transparent;border:0;box-shadow:none}.navbar-nav .open .dropdown-menu>li>a,.navbar-nav .open .dropdown-menu .dropdown-header{padding:5px 15px 5px 25px}.navbar-nav .open .dropdown-menu>li>a{line-height:20px}.navbar-nav .open .dropdown-menu>li>a:hover,.navbar-nav .open .dropdown-menu>li>a:focus{background-image:none}}@media(min-width:768px){.navbar-nav{float:left;margin:0}.navbar-nav>li{float:left}.navbar-nav>li>a{padding-top:15px;padding-bottom:15px}.navbar-nav.navbar-right:last-child{margin-right:-15px}}@media(min-width:768px){.navbar-left{float:left!important}.navbar-right{float:right!important}}.navbar-form{padding:10px 15px;margin-top:8px;margin-right:-15px;margin-bottom:8px;margin-left:-15px;border-top:1px solid transparent;border-bottom:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.1);box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.1)}@media(min-width:768px){.navbar-form .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.navbar-form .form-control{display:inline-block}.navbar-form select.form-control{width:auto}.navbar-form .radio,.navbar-form 
.checkbox{display:inline-block;padding-left:0;margin-top:0;margin-bottom:0}.navbar-form .radio input[type="radio"],.navbar-form .checkbox input[type="checkbox"]{float:none;margin-left:0}}@media(max-width:767px){.navbar-form .form-group{margin-bottom:5px}}@media(min-width:768px){.navbar-form{width:auto;padding-top:0;padding-bottom:0;margin-right:0;margin-left:0;border:0;-webkit-box-shadow:none;box-shadow:none}.navbar-form.navbar-right:last-child{margin-right:-15px}}.navbar-nav>li>.dropdown-menu{margin-top:0;border-top-right-radius:0;border-top-left-radius:0}.navbar-fixed-bottom .navbar-nav>li>.dropdown-menu{border-bottom-right-radius:0;border-bottom-left-radius:0}.navbar-nav.pull-right>li>.dropdown-menu,.navbar-nav>li>.dropdown-menu.pull-right{right:0;left:auto}.navbar-btn{margin-top:8px;margin-bottom:8px}.navbar-btn.btn-sm{margin-top:10px;margin-bottom:10px}.navbar-btn.btn-xs{margin-top:14px;margin-bottom:14px}.navbar-text{margin-top:15px;margin-bottom:15px}@media(min-width:768px){.navbar-text{float:left;margin-right:15px;margin-left:15px}.navbar-text.navbar-right:last-child{margin-right:0}}.navbar-default{background-color:#f8f8f8;border-color:#e7e7e7}.navbar-default .navbar-brand{color:#777}.navbar-default .navbar-brand:hover,.navbar-default .navbar-brand:focus{color:#5e5e5e;background-color:transparent}.navbar-default .navbar-text{color:#777}.navbar-default .navbar-nav>li>a{color:#777}.navbar-default .navbar-nav>li>a:hover,.navbar-default .navbar-nav>li>a:focus{color:#333;background-color:transparent}.navbar-default .navbar-nav>.active>a,.navbar-default .navbar-nav>.active>a:hover,.navbar-default .navbar-nav>.active>a:focus{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav>.disabled>a,.navbar-default .navbar-nav>.disabled>a:hover,.navbar-default .navbar-nav>.disabled>a:focus{color:#ccc;background-color:transparent}.navbar-default .navbar-toggle{border-color:#ddd}.navbar-default .navbar-toggle:hover,.navbar-default 
.navbar-toggle:focus{background-color:#ddd}.navbar-default .navbar-toggle .icon-bar{background-color:#ccc}.navbar-default .navbar-collapse,.navbar-default .navbar-form{border-color:#e7e7e7}.navbar-default .navbar-nav>.open>a,.navbar-default .navbar-nav>.open>a:hover,.navbar-default .navbar-nav>.open>a:focus{color:#555;background-color:#e7e7e7}@media(max-width:767px){.navbar-default .navbar-nav .open .dropdown-menu>li>a{color:#777}.navbar-default .navbar-nav .open .dropdown-menu>li>a:hover,.navbar-default .navbar-nav .open .dropdown-menu>li>a:focus{color:#333;background-color:transparent}.navbar-default .navbar-nav .open .dropdown-menu>.active>a,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:hover,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:focus{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:hover,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:focus{color:#ccc;background-color:transparent}}.navbar-default .navbar-link{color:#777}.navbar-default .navbar-link:hover{color:#333}.navbar-inverse{background-color:#222;border-color:#080808}.navbar-inverse .navbar-brand{color:#999}.navbar-inverse .navbar-brand:hover,.navbar-inverse .navbar-brand:focus{color:#fff;background-color:transparent}.navbar-inverse .navbar-text{color:#999}.navbar-inverse .navbar-nav>li>a{color:#999}.navbar-inverse .navbar-nav>li>a:hover,.navbar-inverse .navbar-nav>li>a:focus{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav>.active>a,.navbar-inverse .navbar-nav>.active>a:hover,.navbar-inverse .navbar-nav>.active>a:focus{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav>.disabled>a,.navbar-inverse .navbar-nav>.disabled>a:hover,.navbar-inverse .navbar-nav>.disabled>a:focus{color:#444;background-color:transparent}.navbar-inverse .navbar-toggle{border-color:#333}.navbar-inverse .navbar-toggle:hover,.navbar-inverse 
.navbar-toggle:focus{background-color:#333}.navbar-inverse .navbar-toggle .icon-bar{background-color:#fff}.navbar-inverse .navbar-collapse,.navbar-inverse .navbar-form{border-color:#101010}.navbar-inverse .navbar-nav>.open>a,.navbar-inverse .navbar-nav>.open>a:hover,.navbar-inverse .navbar-nav>.open>a:focus{color:#fff;background-color:#080808}@media(max-width:767px){.navbar-inverse .navbar-nav .open .dropdown-menu>.dropdown-header{border-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu .divider{background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a{color:#999}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:hover,.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:focus{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:hover,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:focus{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:hover,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:focus{color:#444;background-color:transparent}}.navbar-inverse .navbar-link{color:#999}.navbar-inverse .navbar-link:hover{color:#fff}.breadcrumb{padding:8px 15px;margin-bottom:20px;list-style:none;background-color:#f5f5f5;border-radius:4px}.breadcrumb>li{display:inline-block}.breadcrumb>li+li:before{padding:0 5px;color:#ccc;content:"/\00a0"}.breadcrumb>.active{color:#999}.pagination{display:inline-block;padding-left:0;margin:20px 0;border-radius:4px}.pagination>li{display:inline}.pagination>li>a,.pagination>li>span{position:relative;float:left;padding:6px 12px;margin-left:-1px;line-height:1.428571429;text-decoration:none;background-color:#fff;border:1px solid 
#ddd}.pagination>li:first-child>a,.pagination>li:first-child>span{margin-left:0;border-bottom-left-radius:4px;border-top-left-radius:4px}.pagination>li:last-child>a,.pagination>li:last-child>span{border-top-right-radius:4px;border-bottom-right-radius:4px}.pagination>li>a:hover,.pagination>li>span:hover,.pagination>li>a:focus,.pagination>li>span:focus{background-color:#eee}.pagination>.active>a,.pagination>.active>span,.pagination>.active>a:hover,.pagination>.active>span:hover,.pagination>.active>a:focus,.pagination>.active>span:focus{z-index:2;color:#fff;cursor:default;background-color:#428bca;border-color:#428bca}.pagination>.disabled>span,.pagination>.disabled>span:hover,.pagination>.disabled>span:focus,.pagination>.disabled>a,.pagination>.disabled>a:hover,.pagination>.disabled>a:focus{color:#999;cursor:not-allowed;background-color:#fff;border-color:#ddd}.pagination-lg>li>a,.pagination-lg>li>span{padding:10px 16px;font-size:18px}.pagination-lg>li:first-child>a,.pagination-lg>li:first-child>span{border-bottom-left-radius:6px;border-top-left-radius:6px}.pagination-lg>li:last-child>a,.pagination-lg>li:last-child>span{border-top-right-radius:6px;border-bottom-right-radius:6px}.pagination-sm>li>a,.pagination-sm>li>span{padding:5px 10px;font-size:12px}.pagination-sm>li:first-child>a,.pagination-sm>li:first-child>span{border-bottom-left-radius:3px;border-top-left-radius:3px}.pagination-sm>li:last-child>a,.pagination-sm>li:last-child>span{border-top-right-radius:3px;border-bottom-right-radius:3px}.pager{padding-left:0;margin:20px 0;text-align:center;list-style:none}.pager:before,.pager:after{display:table;content:" "}.pager:after{clear:both}.pager:before,.pager:after{display:table;content:" "}.pager:after{clear:both}.pager li{display:inline}.pager li>a,.pager li>span{display:inline-block;padding:5px 14px;background-color:#fff;border:1px solid #ddd;border-radius:15px}.pager li>a:hover,.pager li>a:focus{text-decoration:none;background-color:#eee}.pager .next>a,.pager 
.next>span{float:right}.pager .previous>a,.pager .previous>span{float:left}.pager .disabled>a,.pager .disabled>a:hover,.pager .disabled>a:focus,.pager .disabled>span{color:#999;cursor:not-allowed;background-color:#fff}.label{display:inline;padding:.2em .6em .3em;font-size:75%;font-weight:bold;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25em}.label[href]:hover,.label[href]:focus{color:#fff;text-decoration:none;cursor:pointer}.label:empty{display:none}.btn .label{position:relative;top:-1px}.label-default{background-color:#999}.label-default[href]:hover,.label-default[href]:focus{background-color:#808080}.label-primary{background-color:#428bca}.label-primary[href]:hover,.label-primary[href]:focus{background-color:#3071a9}.label-success{background-color:#5cb85c}.label-success[href]:hover,.label-success[href]:focus{background-color:#449d44}.label-info{background-color:#5bc0de}.label-info[href]:hover,.label-info[href]:focus{background-color:#31b0d5}.label-warning{background-color:#f0ad4e}.label-warning[href]:hover,.label-warning[href]:focus{background-color:#ec971f}.label-danger{background-color:#d9534f}.label-danger[href]:hover,.label-danger[href]:focus{background-color:#c9302c}.badge{display:inline-block;min-width:10px;padding:3px 7px;font-size:12px;font-weight:bold;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:baseline;background-color:#999;border-radius:10px}.badge:empty{display:none}.btn .badge{position:relative;top:-1px}a.badge:hover,a.badge:focus{color:#fff;text-decoration:none;cursor:pointer}a.list-group-item.active>.badge,.nav-pills>.active>a>.badge{color:#428bca;background-color:#fff}.nav-pills>li>a>.badge{margin-left:3px}.jumbotron{padding:30px;margin-bottom:30px;font-size:21px;font-weight:200;line-height:2.1428571435;color:inherit;background-color:#eee}.jumbotron h1,.jumbotron .h1{line-height:1;color:inherit}.jumbotron p{line-height:1.4}.container 
.jumbotron{border-radius:6px}.jumbotron .container{max-width:100%}@media screen and (min-width:768px){.jumbotron{padding-top:48px;padding-bottom:48px}.container .jumbotron{padding-right:60px;padding-left:60px}.jumbotron h1,.jumbotron .h1{font-size:63px}}.thumbnail{display:block;padding:4px;margin-bottom:20px;line-height:1.428571429;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.thumbnail>img,.thumbnail a>img{display:block;height:auto;max-width:100%;margin-right:auto;margin-left:auto}a.thumbnail:hover,a.thumbnail:focus,a.thumbnail.active{border-color:#428bca}.thumbnail .caption{padding:9px;color:#333}.alert{padding:15px;margin-bottom:20px;border:1px solid transparent;border-radius:4px}.alert h4{margin-top:0;color:inherit}.alert .alert-link{font-weight:bold}.alert>p,.alert>ul{margin-bottom:0}.alert>p+p{margin-top:5px}.alert-dismissable{padding-right:35px}.alert-dismissable .close{position:relative;top:-2px;right:-21px;color:inherit}.alert-success{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.alert-success hr{border-top-color:#c9e2b3}.alert-success .alert-link{color:#2b542c}.alert-info{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.alert-info hr{border-top-color:#a6e1ec}.alert-info .alert-link{color:#245269}.alert-warning{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.alert-warning hr{border-top-color:#f7e1b5}.alert-warning .alert-link{color:#66512c}.alert-danger{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.alert-danger hr{border-top-color:#e4b9c0}.alert-danger .alert-link{color:#843534}@-webkit-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}.progress{height:20px;margin-bottom:20px;overflow:hidden;background-color:#f5f5f5;border-radius:4px;-webkit-box-shadow:inset 0 1px 2px 
rgba(0,0,0,0.1);box-shadow:inset 0 1px 2px rgba(0,0,0,0.1)}.progress-bar{float:left;width:0;height:100%;font-size:12px;line-height:20px;color:#fff;text-align:center;background-color:#428bca;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);-webkit-transition:width .6s ease;transition:width .6s ease}.progress-striped .progress-bar{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-size:40px 40px}.progress.active .progress-bar{-webkit-animation:progress-bar-stripes 2s linear infinite;animation:progress-bar-stripes 2s linear infinite}.progress-bar-success{background-color:#5cb85c}.progress-striped .progress-bar-success{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.progress-bar-info{background-color:#5bc0de}.progress-striped .progress-bar-info{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.progress-bar-warning{background-color:#f0ad4e}.progress-striped .progress-bar-warning{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 
25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.progress-bar-danger{background-color:#d9534f}.progress-striped .progress-bar-danger{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.media,.media-body{overflow:hidden;zoom:1}.media,.media .media{margin-top:15px}.media:first-child{margin-top:0}.media-object{display:block}.media-heading{margin:0 0 5px}.media>.pull-left{margin-right:10px}.media>.pull-right{margin-left:10px}.media-list{padding-left:0;list-style:none}.list-group{padding-left:0;margin-bottom:20px}.list-group-item{position:relative;display:block;padding:10px 15px;margin-bottom:-1px;background-color:#fff;border:1px solid #ddd}.list-group-item:first-child{border-top-right-radius:4px;border-top-left-radius:4px}.list-group-item:last-child{margin-bottom:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}.list-group-item>.badge{float:right}.list-group-item>.badge+.badge{margin-right:5px}a.list-group-item{color:#555}a.list-group-item .list-group-item-heading{color:#333}a.list-group-item:hover,a.list-group-item:focus{text-decoration:none;background-color:#f5f5f5}a.list-group-item.active,a.list-group-item.active:hover,a.list-group-item.active:focus{z-index:2;color:#fff;background-color:#428bca;border-color:#428bca}a.list-group-item.active .list-group-item-heading,a.list-group-item.active:hover .list-group-item-heading,a.list-group-item.active:focus 
.list-group-item-heading{color:inherit}a.list-group-item.active .list-group-item-text,a.list-group-item.active:hover .list-group-item-text,a.list-group-item.active:focus .list-group-item-text{color:#e1edf7}.list-group-item-heading{margin-top:0;margin-bottom:5px}.list-group-item-text{margin-bottom:0;line-height:1.3}.panel{margin-bottom:20px;background-color:#fff;border:1px solid transparent;border-radius:4px;-webkit-box-shadow:0 1px 1px rgba(0,0,0,0.05);box-shadow:0 1px 1px rgba(0,0,0,0.05)}.panel-body{padding:15px}.panel-body:before,.panel-body:after{display:table;content:" "}.panel-body:after{clear:both}.panel-body:before,.panel-body:after{display:table;content:" "}.panel-body:after{clear:both}.panel>.list-group{margin-bottom:0}.panel>.list-group .list-group-item{border-width:1px 0}.panel>.list-group .list-group-item:first-child{border-top-right-radius:0;border-top-left-radius:0}.panel>.list-group .list-group-item:last-child{border-bottom:0}.panel-heading+.list-group .list-group-item:first-child{border-top-width:0}.panel>.table,.panel>.table-responsive>.table{margin-bottom:0}.panel>.panel-body+.table,.panel>.panel-body+.table-responsive{border-top:1px solid #ddd}.panel>.table>tbody:first-child th,.panel>.table>tbody:first-child 
td{border-top:0}.panel>.table-bordered,.panel>.table-responsive>.table-bordered{border:0}.panel>.table-bordered>thead>tr>th:first-child,.panel>.table-responsive>.table-bordered>thead>tr>th:first-child,.panel>.table-bordered>tbody>tr>th:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:first-child,.panel>.table-bordered>tfoot>tr>th:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:first-child,.panel>.table-bordered>thead>tr>td:first-child,.panel>.table-responsive>.table-bordered>thead>tr>td:first-child,.panel>.table-bordered>tbody>tr>td:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:first-child,.panel>.table-bordered>tfoot>tr>td:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:first-child{border-left:0}.panel>.table-bordered>thead>tr>th:last-child,.panel>.table-responsive>.table-bordered>thead>tr>th:last-child,.panel>.table-bordered>tbody>tr>th:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:last-child,.panel>.table-bordered>tfoot>tr>th:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:last-child,.panel>.table-bordered>thead>tr>td:last-child,.panel>.table-responsive>.table-bordered>thead>tr>td:last-child,.panel>.table-bordered>tbody>tr>td:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:last-child,.panel>.table-bordered>tfoot>tr>td:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:last-child{border-right:0}.panel>.table-bordered>thead>tr:last-child>th,.panel>.table-responsive>.table-bordered>thead>tr:last-child>th,.panel>.table-bordered>tbody>tr:last-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>th,.panel>.table-bordered>tfoot>tr:last-child>th,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>th,.panel>.table-bordered>thead>tr:last-child>td,.panel>.table-responsive>.table-bordered>thead>tr:last-child>td,.panel>.table-bordered>tbody>tr:last-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>td,
.panel>.table-bordered>tfoot>tr:last-child>td,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>td{border-bottom:0}.panel>.table-responsive{margin-bottom:0;border:0}.panel-heading{padding:10px 15px;border-bottom:1px solid transparent;border-top-right-radius:3px;border-top-left-radius:3px}.panel-heading>.dropdown .dropdown-toggle{color:inherit}.panel-title{margin-top:0;margin-bottom:0;font-size:16px;color:inherit}.panel-title>a{color:inherit}.panel-footer{padding:10px 15px;background-color:#f5f5f5;border-top:1px solid #ddd;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel-group .panel{margin-bottom:0;overflow:hidden;border-radius:4px}.panel-group .panel+.panel{margin-top:5px}.panel-group .panel-heading{border-bottom:0}.panel-group .panel-heading+.panel-collapse .panel-body{border-top:1px solid #ddd}.panel-group .panel-footer{border-top:0}.panel-group .panel-footer+.panel-collapse .panel-body{border-bottom:1px solid #ddd}.panel-default{border-color:#ddd}.panel-default>.panel-heading{color:#333;background-color:#f5f5f5;border-color:#ddd}.panel-default>.panel-heading+.panel-collapse .panel-body{border-top-color:#ddd}.panel-default>.panel-footer+.panel-collapse .panel-body{border-bottom-color:#ddd}.panel-primary{border-color:#428bca}.panel-primary>.panel-heading{color:#fff;background-color:#428bca;border-color:#428bca}.panel-primary>.panel-heading+.panel-collapse .panel-body{border-top-color:#428bca}.panel-primary>.panel-footer+.panel-collapse .panel-body{border-bottom-color:#428bca}.panel-success{border-color:#d6e9c6}.panel-success>.panel-heading{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.panel-success>.panel-heading+.panel-collapse .panel-body{border-top-color:#d6e9c6}.panel-success>.panel-footer+.panel-collapse .panel-body{border-bottom-color:#d6e9c6}.panel-warning{border-color:#faebcc}.panel-warning>.panel-heading{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.panel-warning>.panel-heading+.panel-collapse 
.panel-body{border-top-color:#faebcc}.panel-warning>.panel-footer+.panel-collapse .panel-body{border-bottom-color:#faebcc}.panel-danger{border-color:#ebccd1}.panel-danger>.panel-heading{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.panel-danger>.panel-heading+.panel-collapse .panel-body{border-top-color:#ebccd1}.panel-danger>.panel-footer+.panel-collapse .panel-body{border-bottom-color:#ebccd1}.panel-info{border-color:#bce8f1}.panel-info>.panel-heading{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.panel-info>.panel-heading+.panel-collapse .panel-body{border-top-color:#bce8f1}.panel-info>.panel-footer+.panel-collapse .panel-body{border-bottom-color:#bce8f1}.well{min-height:20px;padding:19px;margin-bottom:20px;background-color:#f5f5f5;border:1px solid #e3e3e3;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.05);box-shadow:inset 0 1px 1px rgba(0,0,0,0.05)}.well blockquote{border-color:#ddd;border-color:rgba(0,0,0,0.15)}.well-lg{padding:24px;border-radius:6px}.well-sm{padding:9px;border-radius:3px}.close{float:right;font-size:21px;font-weight:bold;line-height:1;color:#000;text-shadow:0 1px 0 #fff;opacity:.2;filter:alpha(opacity=20)}.close:hover,.close:focus{color:#000;text-decoration:none;cursor:pointer;opacity:.5;filter:alpha(opacity=50)}button.close{padding:0;cursor:pointer;background:transparent;border:0;-webkit-appearance:none}.modal-open{overflow:hidden}.modal{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040;display:none;overflow:auto;overflow-y:scroll}.modal.fade .modal-dialog{-webkit-transform:translate(0,-25%);-ms-transform:translate(0,-25%);transform:translate(0,-25%);-webkit-transition:-webkit-transform .3s ease-out;-moz-transition:-moz-transform .3s ease-out;-o-transition:-o-transform .3s ease-out;transition:transform .3s ease-out}.modal.in 
.modal-dialog{-webkit-transform:translate(0,0);-ms-transform:translate(0,0);transform:translate(0,0)}.modal-dialog{position:relative;z-index:1050;width:auto;margin:10px}.modal-content{position:relative;background-color:#fff;border:1px solid #999;border:1px solid rgba(0,0,0,0.2);border-radius:6px;outline:0;-webkit-box-shadow:0 3px 9px rgba(0,0,0,0.5);box-shadow:0 3px 9px rgba(0,0,0,0.5);background-clip:padding-box}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1030;background-color:#000}.modal-backdrop.fade{opacity:0;filter:alpha(opacity=0)}.modal-backdrop.in{opacity:.5;filter:alpha(opacity=50)}.modal-header{min-height:16.428571429px;padding:15px;border-bottom:1px solid #e5e5e5}.modal-header .close{margin-top:-2px}.modal-title{margin:0;line-height:1.428571429}.modal-body{position:relative;padding:20px}.modal-footer{padding:19px 20px 20px;margin-top:15px;text-align:right;border-top:1px solid #e5e5e5}.modal-footer:before,.modal-footer:after{display:table;content:" "}.modal-footer:after{clear:both}.modal-footer:before,.modal-footer:after{display:table;content:" "}.modal-footer:after{clear:both}.modal-footer .btn+.btn{margin-bottom:0;margin-left:5px}.modal-footer .btn-group .btn+.btn{margin-left:-1px}.modal-footer .btn-block+.btn-block{margin-left:0}@media screen and (min-width:768px){.modal-dialog{width:600px;margin:30px auto}.modal-content{-webkit-box-shadow:0 5px 15px rgba(0,0,0,0.5);box-shadow:0 5px 15px rgba(0,0,0,0.5)}}.tooltip{position:absolute;z-index:1030;display:block;font-size:12px;line-height:1.4;opacity:0;filter:alpha(opacity=0);visibility:visible}.tooltip.in{opacity:.9;filter:alpha(opacity=90)}.tooltip.top{padding:5px 0;margin-top:-3px}.tooltip.right{padding:0 5px;margin-left:3px}.tooltip.bottom{padding:5px 0;margin-top:3px}.tooltip.left{padding:0 5px;margin-left:-3px}.tooltip-inner{max-width:200px;padding:3px 
8px;color:#fff;text-align:center;text-decoration:none;background-color:#000;border-radius:4px}.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid}.tooltip.top .tooltip-arrow{bottom:0;left:50%;margin-left:-5px;border-top-color:#000;border-width:5px 5px 0}.tooltip.top-left .tooltip-arrow{bottom:0;left:5px;border-top-color:#000;border-width:5px 5px 0}.tooltip.top-right .tooltip-arrow{right:5px;bottom:0;border-top-color:#000;border-width:5px 5px 0}.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-right-color:#000;border-width:5px 5px 5px 0}.tooltip.left .tooltip-arrow{top:50%;right:0;margin-top:-5px;border-left-color:#000;border-width:5px 0 5px 5px}.tooltip.bottom .tooltip-arrow{top:0;left:50%;margin-left:-5px;border-bottom-color:#000;border-width:0 5px 5px}.tooltip.bottom-left .tooltip-arrow{top:0;left:5px;border-bottom-color:#000;border-width:0 5px 5px}.tooltip.bottom-right .tooltip-arrow{top:0;right:5px;border-bottom-color:#000;border-width:0 5px 5px}.popover{position:absolute;top:0;left:0;z-index:1010;display:none;max-width:276px;padding:1px;text-align:left;white-space:normal;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,0.2);box-shadow:0 5px 10px rgba(0,0,0,0.2);background-clip:padding-box}.popover.top{margin-top:-10px}.popover.right{margin-left:10px}.popover.bottom{margin-top:10px}.popover.left{margin-left:-10px}.popover-title{padding:8px 14px;margin:0;font-size:14px;font-weight:normal;line-height:18px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;border-radius:5px 5px 0 0}.popover-content{padding:9px 14px}.popover .arrow,.popover .arrow:after{position:absolute;display:block;width:0;height:0;border-color:transparent;border-style:solid}.popover .arrow{border-width:11px}.popover .arrow:after{border-width:10px;content:""}.popover.top 
.arrow{bottom:-11px;left:50%;margin-left:-11px;border-top-color:#999;border-top-color:rgba(0,0,0,0.25);border-bottom-width:0}.popover.top .arrow:after{bottom:1px;margin-left:-10px;border-top-color:#fff;border-bottom-width:0;content:" "}.popover.right .arrow{top:50%;left:-11px;margin-top:-11px;border-right-color:#999;border-right-color:rgba(0,0,0,0.25);border-left-width:0}.popover.right .arrow:after{bottom:-10px;left:1px;border-right-color:#fff;border-left-width:0;content:" "}.popover.bottom .arrow{top:-11px;left:50%;margin-left:-11px;border-bottom-color:#999;border-bottom-color:rgba(0,0,0,0.25);border-top-width:0}.popover.bottom .arrow:after{top:1px;margin-left:-10px;border-bottom-color:#fff;border-top-width:0;content:" "}.popover.left .arrow{top:50%;right:-11px;margin-top:-11px;border-left-color:#999;border-left-color:rgba(0,0,0,0.25);border-right-width:0}.popover.left .arrow:after{right:1px;bottom:-10px;border-left-color:#fff;border-right-width:0;content:" "}.carousel{position:relative}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel-inner>.item{position:relative;display:none;-webkit-transition:.6s ease-in-out left;transition:.6s ease-in-out left}.carousel-inner>.item>img,.carousel-inner>.item>a>img{display:block;height:auto;max-width:100%;line-height:1}.carousel-inner>.active,.carousel-inner>.next,.carousel-inner>.prev{display:block}.carousel-inner>.active{left:0}.carousel-inner>.next,.carousel-inner>.prev{position:absolute;top:0;width:100%}.carousel-inner>.next{left:100%}.carousel-inner>.prev{left:-100%}.carousel-inner>.next.left,.carousel-inner>.prev.right{left:0}.carousel-inner>.active.left{left:-100%}.carousel-inner>.active.right{left:100%}.carousel-control{position:absolute;top:0;bottom:0;left:0;width:15%;font-size:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,0.6);opacity:.5;filter:alpha(opacity=50)}.carousel-control.left{background-image:-webkit-linear-gradient(left,color-stop(rgba(0,0,0,0.5) 
0),color-stop(rgba(0,0,0,0.0001) 100%));background-image:linear-gradient(to right,rgba(0,0,0,0.5) 0,rgba(0,0,0,0.0001) 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#80000000',endColorstr='#00000000',GradientType=1)}.carousel-control.right{right:0;left:auto;background-image:-webkit-linear-gradient(left,color-stop(rgba(0,0,0,0.0001) 0),color-stop(rgba(0,0,0,0.5) 100%));background-image:linear-gradient(to right,rgba(0,0,0,0.0001) 0,rgba(0,0,0,0.5) 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#00000000',endColorstr='#80000000',GradientType=1)}.carousel-control:hover,.carousel-control:focus{color:#fff;text-decoration:none;outline:0;opacity:.9;filter:alpha(opacity=90)}.carousel-control .icon-prev,.carousel-control .icon-next,.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right{position:absolute;top:50%;z-index:5;display:inline-block}.carousel-control .icon-prev,.carousel-control .glyphicon-chevron-left{left:50%}.carousel-control .icon-next,.carousel-control .glyphicon-chevron-right{right:50%}.carousel-control .icon-prev,.carousel-control .icon-next{width:20px;height:20px;margin-top:-10px;margin-left:-10px;font-family:serif}.carousel-control .icon-prev:before{content:'\2039'}.carousel-control .icon-next:before{content:'\203a'}.carousel-indicators{position:absolute;bottom:10px;left:50%;z-index:15;width:60%;padding-left:0;margin-left:-30%;text-align:center;list-style:none}.carousel-indicators li{display:inline-block;width:10px;height:10px;margin:1px;text-indent:-999px;cursor:pointer;background-color:#000 \9;background-color:rgba(0,0,0,0);border:1px solid #fff;border-radius:10px}.carousel-indicators .active{width:12px;height:12px;margin:0;background-color:#fff}.carousel-caption{position:absolute;right:15%;bottom:20px;left:15%;z-index:10;padding-top:20px;padding-bottom:20px;color:#fff;text-align:center;text-shadow:0 1px 2px 
rgba(0,0,0,0.6)}.carousel-caption .btn{text-shadow:none}@media screen and (min-width:768px){.carousel-control .glyphicons-chevron-left,.carousel-control .glyphicons-chevron-right,.carousel-control .icon-prev,.carousel-control .icon-next{width:30px;height:30px;margin-top:-15px;margin-left:-15px;font-size:30px}.carousel-caption{right:20%;left:20%;padding-bottom:30px}.carousel-indicators{bottom:20px}}.clearfix:before,.clearfix:after{display:table;content:" "}.clearfix:after{clear:both}.center-block{display:block;margin-right:auto;margin-left:auto}.pull-right{float:right!important}.pull-left{float:left!important}.hide{display:none!important}.show{display:block!important}.invisible{visibility:hidden}.text-hide{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.hidden{display:none!important;visibility:hidden!important}.affix{position:fixed}@-ms-viewport{width:device-width}.visible-xs,tr.visible-xs,th.visible-xs,td.visible-xs{display:none!important}@media(max-width:767px){.visible-xs{display:block!important}table.visible-xs{display:table}tr.visible-xs{display:table-row!important}th.visible-xs,td.visible-xs{display:table-cell!important}}@media(min-width:768px) and (max-width:991px){.visible-xs.visible-sm{display:block!important}table.visible-xs.visible-sm{display:table}tr.visible-xs.visible-sm{display:table-row!important}th.visible-xs.visible-sm,td.visible-xs.visible-sm{display:table-cell!important}}@media(min-width:992px) and 
(max-width:1199px){.visible-xs.visible-md{display:block!important}table.visible-xs.visible-md{display:table}tr.visible-xs.visible-md{display:table-row!important}th.visible-xs.visible-md,td.visible-xs.visible-md{display:table-cell!important}}@media(min-width:1200px){.visible-xs.visible-lg{display:block!important}table.visible-xs.visible-lg{display:table}tr.visible-xs.visible-lg{display:table-row!important}th.visible-xs.visible-lg,td.visible-xs.visible-lg{display:table-cell!important}}.visible-sm,tr.visible-sm,th.visible-sm,td.visible-sm{display:none!important}@media(max-width:767px){.visible-sm.visible-xs{display:block!important}table.visible-sm.visible-xs{display:table}tr.visible-sm.visible-xs{display:table-row!important}th.visible-sm.visible-xs,td.visible-sm.visible-xs{display:table-cell!important}}@media(min-width:768px) and (max-width:991px){.visible-sm{display:block!important}table.visible-sm{display:table}tr.visible-sm{display:table-row!important}th.visible-sm,td.visible-sm{display:table-cell!important}}@media(min-width:992px) and (max-width:1199px){.visible-sm.visible-md{display:block!important}table.visible-sm.visible-md{display:table}tr.visible-sm.visible-md{display:table-row!important}th.visible-sm.visible-md,td.visible-sm.visible-md{display:table-cell!important}}@media(min-width:1200px){.visible-sm.visible-lg{display:block!important}table.visible-sm.visible-lg{display:table}tr.visible-sm.visible-lg{display:table-row!important}th.visible-sm.visible-lg,td.visible-sm.visible-lg{display:table-cell!important}}.visible-md,tr.visible-md,th.visible-md,td.visible-md{display:none!important}@media(max-width:767px){.visible-md.visible-xs{display:block!important}table.visible-md.visible-xs{display:table}tr.visible-md.visible-xs{display:table-row!important}th.visible-md.visible-xs,td.visible-md.visible-xs{display:table-cell!important}}@media(min-width:768px) and 
(max-width:991px){.visible-md.visible-sm{display:block!important}table.visible-md.visible-sm{display:table}tr.visible-md.visible-sm{display:table-row!important}th.visible-md.visible-sm,td.visible-md.visible-sm{display:table-cell!important}}@media(min-width:992px) and (max-width:1199px){.visible-md{display:block!important}table.visible-md{display:table}tr.visible-md{display:table-row!important}th.visible-md,td.visible-md{display:table-cell!important}}@media(min-width:1200px){.visible-md.visible-lg{display:block!important}table.visible-md.visible-lg{display:table}tr.visible-md.visible-lg{display:table-row!important}th.visible-md.visible-lg,td.visible-md.visible-lg{display:table-cell!important}}.visible-lg,tr.visible-lg,th.visible-lg,td.visible-lg{display:none!important}@media(max-width:767px){.visible-lg.visible-xs{display:block!important}table.visible-lg.visible-xs{display:table}tr.visible-lg.visible-xs{display:table-row!important}th.visible-lg.visible-xs,td.visible-lg.visible-xs{display:table-cell!important}}@media(min-width:768px) and (max-width:991px){.visible-lg.visible-sm{display:block!important}table.visible-lg.visible-sm{display:table}tr.visible-lg.visible-sm{display:table-row!important}th.visible-lg.visible-sm,td.visible-lg.visible-sm{display:table-cell!important}}@media(min-width:992px) and 
(max-width:1199px){.visible-lg.visible-md{display:block!important}table.visible-lg.visible-md{display:table}tr.visible-lg.visible-md{display:table-row!important}th.visible-lg.visible-md,td.visible-lg.visible-md{display:table-cell!important}}@media(min-width:1200px){.visible-lg{display:block!important}table.visible-lg{display:table}tr.visible-lg{display:table-row!important}th.visible-lg,td.visible-lg{display:table-cell!important}}.hidden-xs{display:block!important}table.hidden-xs{display:table}tr.hidden-xs{display:table-row!important}th.hidden-xs,td.hidden-xs{display:table-cell!important}@media(max-width:767px){.hidden-xs,tr.hidden-xs,th.hidden-xs,td.hidden-xs{display:none!important}}@media(min-width:768px) and (max-width:991px){.hidden-xs.hidden-sm,tr.hidden-xs.hidden-sm,th.hidden-xs.hidden-sm,td.hidden-xs.hidden-sm{display:none!important}}@media(min-width:992px) and (max-width:1199px){.hidden-xs.hidden-md,tr.hidden-xs.hidden-md,th.hidden-xs.hidden-md,td.hidden-xs.hidden-md{display:none!important}}@media(min-width:1200px){.hidden-xs.hidden-lg,tr.hidden-xs.hidden-lg,th.hidden-xs.hidden-lg,td.hidden-xs.hidden-lg{display:none!important}}.hidden-sm{display:block!important}table.hidden-sm{display:table}tr.hidden-sm{display:table-row!important}th.hidden-sm,td.hidden-sm{display:table-cell!important}@media(max-width:767px){.hidden-sm.hidden-xs,tr.hidden-sm.hidden-xs,th.hidden-sm.hidden-xs,td.hidden-sm.hidden-xs{display:none!important}}@media(min-width:768px) and (max-width:991px){.hidden-sm,tr.hidden-sm,th.hidden-sm,td.hidden-sm{display:none!important}}@media(min-width:992px) and 
(max-width:1199px){.hidden-sm.hidden-md,tr.hidden-sm.hidden-md,th.hidden-sm.hidden-md,td.hidden-sm.hidden-md{display:none!important}}@media(min-width:1200px){.hidden-sm.hidden-lg,tr.hidden-sm.hidden-lg,th.hidden-sm.hidden-lg,td.hidden-sm.hidden-lg{display:none!important}}.hidden-md{display:block!important}table.hidden-md{display:table}tr.hidden-md{display:table-row!important}th.hidden-md,td.hidden-md{display:table-cell!important}@media(max-width:767px){.hidden-md.hidden-xs,tr.hidden-md.hidden-xs,th.hidden-md.hidden-xs,td.hidden-md.hidden-xs{display:none!important}}@media(min-width:768px) and (max-width:991px){.hidden-md.hidden-sm,tr.hidden-md.hidden-sm,th.hidden-md.hidden-sm,td.hidden-md.hidden-sm{display:none!important}}@media(min-width:992px) and (max-width:1199px){.hidden-md,tr.hidden-md,th.hidden-md,td.hidden-md{display:none!important}}@media(min-width:1200px){.hidden-md.hidden-lg,tr.hidden-md.hidden-lg,th.hidden-md.hidden-lg,td.hidden-md.hidden-lg{display:none!important}}.hidden-lg{display:block!important}table.hidden-lg{display:table}tr.hidden-lg{display:table-row!important}th.hidden-lg,td.hidden-lg{display:table-cell!important}@media(max-width:767px){.hidden-lg.hidden-xs,tr.hidden-lg.hidden-xs,th.hidden-lg.hidden-xs,td.hidden-lg.hidden-xs{display:none!important}}@media(min-width:768px) and (max-width:991px){.hidden-lg.hidden-sm,tr.hidden-lg.hidden-sm,th.hidden-lg.hidden-sm,td.hidden-lg.hidden-sm{display:none!important}}@media(min-width:992px) and (max-width:1199px){.hidden-lg.hidden-md,tr.hidden-lg.hidden-md,th.hidden-lg.hidden-md,td.hidden-lg.hidden-md{display:none!important}}@media(min-width:1200px){.hidden-lg,tr.hidden-lg,th.hidden-lg,td.hidden-lg{display:none!important}}.visible-print,tr.visible-print,th.visible-print,td.visible-print{display:none!important}@media 
print{.visible-print{display:block!important}table.visible-print{display:table}tr.visible-print{display:table-row!important}th.visible-print,td.visible-print{display:table-cell!important}.hidden-print,tr.hidden-print,th.hidden-print,td.hidden-print{display:none!important}} \ No newline at end of file diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/bootstrap/js/bootstrap.min.js b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/bootstrap/js/bootstrap.min.js new file mode 100644 index 0000000000000000000000000000000000000000..1a6258efcbff4469d71123fbb01c921ac5231f14 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/bootstrap/js/bootstrap.min.js @@ -0,0 +1,7 @@ +/*! + * Bootstrap v3.0.3 (http://getbootstrap.com) + * Copyright 2013 Twitter, Inc. 
+ * Licensed under http://www.apache.org/licenses/LICENSE-2.0 + */ + +if("undefined"==typeof jQuery)throw new Error("Bootstrap requires jQuery");+function(a){"use strict";function b(){var a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var c in b)if(void 0!==a.style[c])return{end:b[c]}}a.fn.emulateTransitionEnd=function(b){var c=!1,d=this;a(this).one(a.support.transition.end,function(){c=!0});var e=function(){c||a(d).trigger(a.support.transition.end)};return setTimeout(e,b),this},a(function(){a.support.transition=b()})}(jQuery),+function(a){"use strict";var b='[data-dismiss="alert"]',c=function(c){a(c).on("click",b,this.close)};c.prototype.close=function(b){function c(){f.trigger("closed.bs.alert").remove()}var d=a(this),e=d.attr("data-target");e||(e=d.attr("href"),e=e&&e.replace(/.*(?=#[^\s]*$)/,""));var f=a(e);b&&b.preventDefault(),f.length||(f=d.hasClass("alert")?d:d.parent()),f.trigger(b=a.Event("close.bs.alert")),b.isDefaultPrevented()||(f.removeClass("in"),a.support.transition&&f.hasClass("fade")?f.one(a.support.transition.end,c).emulateTransitionEnd(150):c())};var d=a.fn.alert;a.fn.alert=function(b){return this.each(function(){var d=a(this),e=d.data("bs.alert");e||d.data("bs.alert",e=new c(this)),"string"==typeof b&&e[b].call(d)})},a.fn.alert.Constructor=c,a.fn.alert.noConflict=function(){return a.fn.alert=d,this},a(document).on("click.bs.alert.data-api",b,c.prototype.close)}(jQuery),+function(a){"use strict";var b=function(c,d){this.$element=a(c),this.options=a.extend({},b.DEFAULTS,d)};b.DEFAULTS={loadingText:"loading..."},b.prototype.setState=function(a){var 
b="disabled",c=this.$element,d=c.is("input")?"val":"html",e=c.data();a+="Text",e.resetText||c.data("resetText",c[d]()),c[d](e[a]||this.options[a]),setTimeout(function(){"loadingText"==a?c.addClass(b).attr(b,b):c.removeClass(b).removeAttr(b)},0)},b.prototype.toggle=function(){var a=this.$element.closest('[data-toggle="buttons"]'),b=!0;if(a.length){var c=this.$element.find("input");"radio"===c.prop("type")&&(c.prop("checked")&&this.$element.hasClass("active")?b=!1:a.find(".active").removeClass("active")),b&&c.prop("checked",!this.$element.hasClass("active")).trigger("change")}b&&this.$element.toggleClass("active")};var c=a.fn.button;a.fn.button=function(c){return this.each(function(){var d=a(this),e=d.data("bs.button"),f="object"==typeof c&&c;e||d.data("bs.button",e=new b(this,f)),"toggle"==c?e.toggle():c&&e.setState(c)})},a.fn.button.Constructor=b,a.fn.button.noConflict=function(){return a.fn.button=c,this},a(document).on("click.bs.button.data-api","[data-toggle^=button]",function(b){var c=a(b.target);c.hasClass("btn")||(c=c.closest(".btn")),c.button("toggle"),b.preventDefault()})}(jQuery),+function(a){"use strict";var b=function(b,c){this.$element=a(b),this.$indicators=this.$element.find(".carousel-indicators"),this.options=c,this.paused=this.sliding=this.interval=this.$active=this.$items=null,"hover"==this.options.pause&&this.$element.on("mouseenter",a.proxy(this.pause,this)).on("mouseleave",a.proxy(this.cycle,this))};b.DEFAULTS={interval:5e3,pause:"hover",wrap:!0},b.prototype.cycle=function(b){return b||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(a.proxy(this.next,this),this.options.interval)),this},b.prototype.getActiveIndex=function(){return this.$active=this.$element.find(".item.active"),this.$items=this.$active.parent().children(),this.$items.index(this.$active)},b.prototype.to=function(b){var c=this,d=this.getActiveIndex();return b>this.$items.length-1||0>b?void 
0:this.sliding?this.$element.one("slid.bs.carousel",function(){c.to(b)}):d==b?this.pause().cycle():this.slide(b>d?"next":"prev",a(this.$items[b]))},b.prototype.pause=function(b){return b||(this.paused=!0),this.$element.find(".next, .prev").length&&a.support.transition.end&&(this.$element.trigger(a.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},b.prototype.next=function(){return this.sliding?void 0:this.slide("next")},b.prototype.prev=function(){return this.sliding?void 0:this.slide("prev")},b.prototype.slide=function(b,c){var d=this.$element.find(".item.active"),e=c||d[b](),f=this.interval,g="next"==b?"left":"right",h="next"==b?"first":"last",i=this;if(!e.length){if(!this.options.wrap)return;e=this.$element.find(".item")[h]()}this.sliding=!0,f&&this.pause();var j=a.Event("slide.bs.carousel",{relatedTarget:e[0],direction:g});if(!e.hasClass("active")){if(this.$indicators.length&&(this.$indicators.find(".active").removeClass("active"),this.$element.one("slid.bs.carousel",function(){var b=a(i.$indicators.children()[i.getActiveIndex()]);b&&b.addClass("active")})),a.support.transition&&this.$element.hasClass("slide")){if(this.$element.trigger(j),j.isDefaultPrevented())return;e.addClass(b),e[0].offsetWidth,d.addClass(g),e.addClass(g),d.one(a.support.transition.end,function(){e.removeClass([b,g].join(" ")).addClass("active"),d.removeClass(["active",g].join(" ")),i.sliding=!1,setTimeout(function(){i.$element.trigger("slid.bs.carousel")},0)}).emulateTransitionEnd(600)}else{if(this.$element.trigger(j),j.isDefaultPrevented())return;d.removeClass("active"),e.addClass("active"),this.sliding=!1,this.$element.trigger("slid.bs.carousel")}return f&&this.cycle(),this}};var c=a.fn.carousel;a.fn.carousel=function(c){return this.each(function(){var d=a(this),e=d.data("bs.carousel"),f=a.extend({},b.DEFAULTS,d.data(),"object"==typeof c&&c),g="string"==typeof c?c:f.slide;e||d.data("bs.carousel",e=new b(this,f)),"number"==typeof 
c?e.to(c):g?e[g]():f.interval&&e.pause().cycle()})},a.fn.carousel.Constructor=b,a.fn.carousel.noConflict=function(){return a.fn.carousel=c,this},a(document).on("click.bs.carousel.data-api","[data-slide], [data-slide-to]",function(b){var c,d=a(this),e=a(d.attr("data-target")||(c=d.attr("href"))&&c.replace(/.*(?=#[^\s]+$)/,"")),f=a.extend({},e.data(),d.data()),g=d.attr("data-slide-to");g&&(f.interval=!1),e.carousel(f),(g=d.attr("data-slide-to"))&&e.data("bs.carousel").to(g),b.preventDefault()}),a(window).on("load",function(){a('[data-ride="carousel"]').each(function(){var b=a(this);b.carousel(b.data())})})}(jQuery),+function(a){"use strict";var b=function(c,d){this.$element=a(c),this.options=a.extend({},b.DEFAULTS,d),this.transitioning=null,this.options.parent&&(this.$parent=a(this.options.parent)),this.options.toggle&&this.toggle()};b.DEFAULTS={toggle:!0},b.prototype.dimension=function(){var a=this.$element.hasClass("width");return a?"width":"height"},b.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var b=a.Event("show.bs.collapse");if(this.$element.trigger(b),!b.isDefaultPrevented()){var c=this.$parent&&this.$parent.find("> .panel > .in");if(c&&c.length){var d=c.data("bs.collapse");if(d&&d.transitioning)return;c.collapse("hide"),d||c.data("bs.collapse",null)}var e=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[e](0),this.transitioning=1;var f=function(){this.$element.removeClass("collapsing").addClass("in")[e]("auto"),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!a.support.transition)return f.call(this);var g=a.camelCase(["scroll",e].join("-"));this.$element.one(a.support.transition.end,a.proxy(f,this)).emulateTransitionEnd(350)[e](this.$element[0][g])}}},b.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var b=a.Event("hide.bs.collapse");if(this.$element.trigger(b),!b.isDefaultPrevented()){var 
c=this.dimension();this.$element[c](this.$element[c]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse").removeClass("in"),this.transitioning=1;var d=function(){this.transitioning=0,this.$element.trigger("hidden.bs.collapse").removeClass("collapsing").addClass("collapse")};return a.support.transition?(this.$element[c](0).one(a.support.transition.end,a.proxy(d,this)).emulateTransitionEnd(350),void 0):d.call(this)}}},b.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()};var c=a.fn.collapse;a.fn.collapse=function(c){return this.each(function(){var d=a(this),e=d.data("bs.collapse"),f=a.extend({},b.DEFAULTS,d.data(),"object"==typeof c&&c);e||d.data("bs.collapse",e=new b(this,f)),"string"==typeof c&&e[c]()})},a.fn.collapse.Constructor=b,a.fn.collapse.noConflict=function(){return a.fn.collapse=c,this},a(document).on("click.bs.collapse.data-api","[data-toggle=collapse]",function(b){var c,d=a(this),e=d.attr("data-target")||b.preventDefault()||(c=d.attr("href"))&&c.replace(/.*(?=#[^\s]+$)/,""),f=a(e),g=f.data("bs.collapse"),h=g?"toggle":d.data(),i=d.attr("data-parent"),j=i&&a(i);g&&g.transitioning||(j&&j.find('[data-toggle=collapse][data-parent="'+i+'"]').not(d).addClass("collapsed"),d[f.hasClass("in")?"addClass":"removeClass"]("collapsed")),f.collapse(h)})}(jQuery),+function(a){"use strict";function b(){a(d).remove(),a(e).each(function(b){var d=c(a(this));d.hasClass("open")&&(d.trigger(b=a.Event("hide.bs.dropdown")),b.isDefaultPrevented()||d.removeClass("open").trigger("hidden.bs.dropdown"))})}function c(b){var c=b.attr("data-target");c||(c=b.attr("href"),c=c&&/#/.test(c)&&c.replace(/.*(?=#[^\s]*$)/,""));var d=c&&a(c);return d&&d.length?d:b.parent()}var d=".dropdown-backdrop",e="[data-toggle=dropdown]",f=function(b){a(b).on("click.bs.dropdown",this.toggle)};f.prototype.toggle=function(d){var e=a(this);if(!e.is(".disabled, :disabled")){var f=c(e),g=f.hasClass("open");if(b(),!g){if("ontouchstart"in 
document.documentElement&&!f.closest(".navbar-nav").length&&a(''}),b.prototype=a.extend({},a.fn.tooltip.Constructor.prototype),b.prototype.constructor=b,b.prototype.getDefaults=function(){return b.DEFAULTS},b.prototype.setContent=function(){var a=this.tip(),b=this.getTitle(),c=this.getContent();a.find(".popover-title")[this.options.html?"html":"text"](b),a.find(".popover-content")[this.options.html?"html":"text"](c),a.removeClass("fade top bottom left right in"),a.find(".popover-title").html()||a.find(".popover-title").hide()},b.prototype.hasContent=function(){return this.getTitle()||this.getContent()},b.prototype.getContent=function(){var a=this.$element,b=this.options;return a.attr("data-content")||("function"==typeof b.content?b.content.call(a[0]):b.content)},b.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")},b.prototype.tip=function(){return this.$tip||(this.$tip=a(this.options.template)),this.$tip};var c=a.fn.popover;a.fn.popover=function(c){return this.each(function(){var d=a(this),e=d.data("bs.popover"),f="object"==typeof c&&c;e||d.data("bs.popover",e=new b(this,f)),"string"==typeof c&&e[c]()})},a.fn.popover.Constructor=b,a.fn.popover.noConflict=function(){return a.fn.popover=c,this}}(jQuery),+function(a){"use strict";function b(c,d){var e,f=a.proxy(this.process,this);this.$element=a(c).is("body")?a(window):a(c),this.$body=a("body"),this.$scrollElement=this.$element.on("scroll.bs.scroll-spy.data-api",f),this.options=a.extend({},b.DEFAULTS,d),this.selector=(this.options.target||(e=a(c).attr("href"))&&e.replace(/.*(?=#[^\s]+$)/,"")||"")+" .nav li > a",this.offsets=a([]),this.targets=a([]),this.activeTarget=null,this.refresh(),this.process()}b.DEFAULTS={offset:10},b.prototype.refresh=function(){var b=this.$element[0]==window?"offset":"position";this.offsets=a([]),this.targets=a([]);var c=this;this.$body.find(this.selector).map(function(){var d=a(this),e=d.data("target")||d.attr("href"),f=/^#\w/.test(e)&&a(e);return 
f&&f.length&&[[f[b]().top+(!a.isWindow(c.$scrollElement.get(0))&&c.$scrollElement.scrollTop()),e]]||null}).sort(function(a,b){return a[0]-b[0]}).each(function(){c.offsets.push(this[0]),c.targets.push(this[1])})},b.prototype.process=function(){var a,b=this.$scrollElement.scrollTop()+this.options.offset,c=this.$scrollElement[0].scrollHeight||this.$body[0].scrollHeight,d=c-this.$scrollElement.height(),e=this.offsets,f=this.targets,g=this.activeTarget;if(b>=d)return g!=(a=f.last()[0])&&this.activate(a);for(a=e.length;a--;)g!=f[a]&&b>=e[a]&&(!e[a+1]||b<=e[a+1])&&this.activate(f[a])},b.prototype.activate=function(b){this.activeTarget=b,a(this.selector).parents(".active").removeClass("active");var c=this.selector+'[data-target="'+b+'"],'+this.selector+'[href="'+b+'"]',d=a(c).parents("li").addClass("active");d.parent(".dropdown-menu").length&&(d=d.closest("li.dropdown").addClass("active")),d.trigger("activate.bs.scrollspy")};var c=a.fn.scrollspy;a.fn.scrollspy=function(c){return this.each(function(){var d=a(this),e=d.data("bs.scrollspy"),f="object"==typeof c&&c;e||d.data("bs.scrollspy",e=new b(this,f)),"string"==typeof c&&e[c]()})},a.fn.scrollspy.Constructor=b,a.fn.scrollspy.noConflict=function(){return a.fn.scrollspy=c,this},a(window).on("load",function(){a('[data-spy="scroll"]').each(function(){var b=a(this);b.scrollspy(b.data())})})}(jQuery),+function(a){"use strict";var b=function(b){this.element=a(b)};b.prototype.show=function(){var b=this.element,c=b.closest("ul:not(.dropdown-menu)"),d=b.data("target");if(d||(d=b.attr("href"),d=d&&d.replace(/.*(?=#[^\s]*$)/,"")),!b.parent("li").hasClass("active")){var e=c.find(".active:last a")[0],f=a.Event("show.bs.tab",{relatedTarget:e});if(b.trigger(f),!f.isDefaultPrevented()){var g=a(d);this.activate(b.parent("li"),c),this.activate(g,g.parent(),function(){b.trigger({type:"shown.bs.tab",relatedTarget:e})})}}},b.prototype.activate=function(b,c,d){function e(){f.removeClass("active").find("> .dropdown-menu > 
.active").removeClass("active"),b.addClass("active"),g?(b[0].offsetWidth,b.addClass("in")):b.removeClass("fade"),b.parent(".dropdown-menu")&&b.closest("li.dropdown").addClass("active"),d&&d()}var f=c.find("> .active"),g=d&&a.support.transition&&f.hasClass("fade");g?f.one(a.support.transition.end,e).emulateTransitionEnd(150):e(),f.removeClass("in")};var c=a.fn.tab;a.fn.tab=function(c){return this.each(function(){var d=a(this),e=d.data("bs.tab");e||d.data("bs.tab",e=new b(this)),"string"==typeof c&&e[c]()})},a.fn.tab.Constructor=b,a.fn.tab.noConflict=function(){return a.fn.tab=c,this},a(document).on("click.bs.tab.data-api",'[data-toggle="tab"], [data-toggle="pill"]',function(b){b.preventDefault(),a(this).tab("show")})}(jQuery),+function(a){"use strict";var b=function(c,d){this.options=a.extend({},b.DEFAULTS,d),this.$window=a(window).on("scroll.bs.affix.data-api",a.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",a.proxy(this.checkPositionWithEventLoop,this)),this.$element=a(c),this.affixed=this.unpin=null,this.checkPosition()};b.RESET="affix affix-top affix-bottom",b.DEFAULTS={offset:0},b.prototype.checkPositionWithEventLoop=function(){setTimeout(a.proxy(this.checkPosition,this),1)},b.prototype.checkPosition=function(){if(this.$element.is(":visible")){var c=a(document).height(),d=this.$window.scrollTop(),e=this.$element.offset(),f=this.options.offset,g=f.top,h=f.bottom;"object"!=typeof f&&(h=g=f),"function"==typeof g&&(g=f.top()),"function"==typeof h&&(h=f.bottom());var i=null!=this.unpin&&d+this.unpin<=e.top?!1:null!=h&&e.top+this.$element.height()>=c-h?"bottom":null!=g&&g>=d?"top":!1;this.affixed!==i&&(this.unpin&&this.$element.css("top",""),this.affixed=i,this.unpin="bottom"==i?e.top-d:null,this.$element.removeClass(b.RESET).addClass("affix"+(i?"-"+i:"")),"bottom"==i&&this.$element.offset({top:document.body.offsetHeight-h-this.$element.height()}))}};var c=a.fn.affix;a.fn.affix=function(c){return this.each(function(){var 
d=a(this),e=d.data("bs.affix"),f="object"==typeof c&&c;e||d.data("bs.affix",e=new b(this,f)),"string"==typeof c&&e[c]()})},a.fn.affix.Constructor=b,a.fn.affix.noConflict=function(){return a.fn.affix=c,this},a(window).on("load",function(){a('[data-spy="affix"]').each(function(){var b=a(this),c=b.data();c.offset=c.offset||{},c.offsetBottom&&(c.offset.bottom=c.offsetBottom),c.offsetTop&&(c.offset.top=c.offsetTop),b.affix(c)})})}(jQuery); \ No newline at end of file diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/cropper/cropper.common.js b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/cropper/cropper.common.js new file mode 100644 index 0000000000000000000000000000000000000000..f0659fd51c3110d908471258ae0e145eeae8759c --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/cropper/cropper.common.js @@ -0,0 +1,3757 @@ +/*! + * Cropper v4.0.0 + * https://github.com/fengyuanchen/cropper + * + * Copyright (c) 2014-2018 Chen Fengyuan + * Released under the MIT license + * + * Date: 2018-04-01T06:27:27.267Z + */ + +'use strict'; + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var $ = _interopDefault(require('jquery')); + +var IN_BROWSER = typeof window !== 'undefined'; +var WINDOW = IN_BROWSER ? 
window : {}; +var NAMESPACE = 'cropper'; + +// Actions +var ACTION_ALL = 'all'; +var ACTION_CROP = 'crop'; +var ACTION_MOVE = 'move'; +var ACTION_ZOOM = 'zoom'; +var ACTION_EAST = 'e'; +var ACTION_WEST = 'w'; +var ACTION_SOUTH = 's'; +var ACTION_NORTH = 'n'; +var ACTION_NORTH_EAST = 'ne'; +var ACTION_NORTH_WEST = 'nw'; +var ACTION_SOUTH_EAST = 'se'; +var ACTION_SOUTH_WEST = 'sw'; + +// Classes +var CLASS_CROP = NAMESPACE + '-crop'; +var CLASS_DISABLED = NAMESPACE + '-disabled'; +var CLASS_HIDDEN = NAMESPACE + '-hidden'; +var CLASS_HIDE = NAMESPACE + '-hide'; +var CLASS_INVISIBLE = NAMESPACE + '-invisible'; +var CLASS_MODAL = NAMESPACE + '-modal'; +var CLASS_MOVE = NAMESPACE + '-move'; + +// Data keys +var DATA_ACTION = 'action'; +var DATA_PREVIEW = 'preview'; + +// Drag modes +var DRAG_MODE_CROP = 'crop'; +var DRAG_MODE_MOVE = 'move'; +var DRAG_MODE_NONE = 'none'; + +// Events +var EVENT_CROP = 'crop'; +var EVENT_CROP_END = 'cropend'; +var EVENT_CROP_MOVE = 'cropmove'; +var EVENT_CROP_START = 'cropstart'; +var EVENT_DBLCLICK = 'dblclick'; +var EVENT_LOAD = 'load'; +var EVENT_POINTER_DOWN = WINDOW.PointerEvent ? 'pointerdown' : 'touchstart mousedown'; +var EVENT_POINTER_MOVE = WINDOW.PointerEvent ? 'pointermove' : 'touchmove mousemove'; +var EVENT_POINTER_UP = WINDOW.PointerEvent ? 
'pointerup pointercancel' : 'touchend touchcancel mouseup'; +var EVENT_READY = 'ready'; +var EVENT_RESIZE = 'resize'; +var EVENT_WHEEL = 'wheel mousewheel DOMMouseScroll'; +var EVENT_ZOOM = 'zoom'; + +// RegExps +var REGEXP_ACTIONS = /^(?:e|w|s|n|se|sw|ne|nw|all|crop|move|zoom)$/; +var REGEXP_DATA_URL = /^data:/; +var REGEXP_DATA_URL_JPEG = /^data:image\/jpeg;base64,/; +var REGEXP_TAG_NAME = /^(?:img|canvas)$/i; + +var DEFAULTS = { + // Define the view mode of the cropper + viewMode: 0, // 0, 1, 2, 3 + + // Define the dragging mode of the cropper + dragMode: DRAG_MODE_CROP, // 'crop', 'move' or 'none' + + // Define the aspect ratio of the crop box + aspectRatio: NaN, + + // An object with the previous cropping result data + data: null, + + // A selector for adding extra containers to preview + preview: '', + + // Re-render the cropper when resize the window + responsive: true, + + // Restore the cropped area after resize the window + restore: true, + + // Check if the current image is a cross-origin image + checkCrossOrigin: true, + + // Check the current image's Exif Orientation information + checkOrientation: true, + + // Show the black modal + modal: true, + + // Show the dashed lines for guiding + guides: true, + + // Show the center indicator for guiding + center: true, + + // Show the white modal to highlight the crop box + highlight: true, + + // Show the grid background + background: true, + + // Enable to crop the image automatically when initialize + autoCrop: true, + + // Define the percentage of automatic cropping area when initializes + autoCropArea: 0.8, + + // Enable to move the image + movable: true, + + // Enable to rotate the image + rotatable: true, + + // Enable to scale the image + scalable: true, + + // Enable to zoom the image + zoomable: true, + + // Enable to zoom the image by dragging touch + zoomOnTouch: true, + + // Enable to zoom the image by wheeling mouse + zoomOnWheel: true, + + // Define zoom ratio when zoom the image by wheeling 
mouse + wheelZoomRatio: 0.1, + + // Enable to move the crop box + cropBoxMovable: true, + + // Enable to resize the crop box + cropBoxResizable: true, + + // Toggle drag mode between "crop" and "move" when click twice on the cropper + toggleDragModeOnDblclick: true, + + // Size limitation + minCanvasWidth: 0, + minCanvasHeight: 0, + minCropBoxWidth: 0, + minCropBoxHeight: 0, + minContainerWidth: 200, + minContainerHeight: 100, + + // Shortcuts of events + ready: null, + cropstart: null, + cropmove: null, + cropend: null, + crop: null, + zoom: null +}; + +var TEMPLATE = '
' + '
' + '
' + '
' + '
' + '
' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '
' + '
/*
 * Babel-generated runtime helpers plus primitive type predicates.
 */

// `typeof` replacement that also reports 'symbol' for polyfilled Symbols
// (in such environments a plain `typeof` would report 'object').
var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) {
  return typeof obj;
} : function (obj) {
  return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;
};

// Throw when a transpiled class constructor is invoked without `new`.
var classCallCheck = function (instance, Constructor) {
  if (!(instance instanceof Constructor)) {
    throw new TypeError("Cannot call a class as a function");
  }
};

// Install prototype and static members for a transpiled class.
var createClass = function () {
  function defineProperties(target, props) {
    for (var i = 0; i < props.length; i++) {
      var descriptor = props[i];
      descriptor.enumerable = descriptor.enumerable || false;
      descriptor.configurable = true;
      if ("value" in descriptor) descriptor.writable = true;
      Object.defineProperty(target, descriptor.key, descriptor);
    }
  }

  return function (Constructor, protoProps, staticProps) {
    if (protoProps) defineProperties(Constructor.prototype, protoProps);
    if (staticProps) defineProperties(Constructor, staticProps);
    return Constructor;
  };
}();

// Copy a real array element-by-element, or materialize any iterable via
// `Array.from`, so the result can safely be consumed with `apply`/spread.
var toConsumableArray = function (arr) {
  if (Array.isArray(arr)) {
    var copy = Array(arr.length);

    for (var i = 0; i < arr.length; i++) {
      copy[i] = arr[i];
    }

    return copy;
  }

  return Array.from(arr);
};

/**
 * Check if the given value is not a number.
 */
var isNaN = Number.isNaN || WINDOW.isNaN;

/**
 * Check if the given value is a number.
 * @param {*} value - The value to check.
 * @returns {boolean} Returns `true` if the given value is a number, else `false`.
 */
function isNumber(value) {
  return typeof value === 'number' && !isNaN(value);
}

/**
 * Check if the given value is undefined.
 * @param {*} value - The value to check.
 * @returns {boolean} Returns `true` if the given value is undefined, else `false`.
 */
function isUndefined(value) {
  return typeof value === 'undefined';
}
/**
 * Check if the given value is an object.
 * @param {*} value - The value to check.
 * @returns {boolean} Returns `true` if the given value is an object, else `false`.
 */
function isObject(value) {
  return _typeof(value) === 'object' && value !== null;
}

var hasOwnProperty = Object.prototype.hasOwnProperty;

/**
 * Check if the given value is a plain object (created by `{}` or `new Object`).
 * @param {*} value - The value to check.
 * @returns {boolean} Returns `true` if the given value is a plain object, else `false`.
 */
function isPlainObject(value) {
  if (!isObject(value)) {
    return false;
  }

  // Accessing `.constructor` on exotic objects (e.g. cross-realm or proxied)
  // can throw, hence the try/catch.
  try {
    var ctor = value.constructor;
    var proto = ctor.prototype;

    // Only `Object.prototype` owns `isPrototypeOf`.
    return ctor && proto && hasOwnProperty.call(proto, 'isPrototypeOf');
  } catch (e) {
    return false;
  }
}

/**
 * Check if the given value is a function.
 * @param {*} value - The value to check.
 * @returns {boolean} Returns `true` if the given value is a function, else `false`.
 */
function isFunction(value) {
  return typeof value === 'function';
}

/**
 * Iterate the given data: arrays/array-likes by index, plain objects by key.
 * The callback may return `false` to break out of an indexed iteration.
 * @param {*} data - The data to iterate.
 * @param {Function} callback - The process function for each element.
 * @returns {*} The original data.
 */
function forEach(data, callback) {
  if (data && isFunction(callback)) {
    if (Array.isArray(data) || isNumber(data.length) /* array-like */) {
      var length = data.length;

      for (var i = 0; i < length; i += 1) {
        if (callback.call(data, data[i], i, data) === false) {
          break;
        }
      }
    } else if (isObject(data)) {
      Object.keys(data).forEach(function (key) {
        callback.call(data, data[key], key, data);
      });
    }
  }

  return data;
}
/**
 * Extend the given object with the own enumerable keys of the remaining
 * arguments (native `Object.assign` when available, loop-based fallback
 * otherwise).
 * @param {*} obj - The object to be extended.
 * @param {*} args - The rest objects which will be merged into the first object.
 * @returns {Object} The extended object.
 */
var assign = Object.assign || function assign(obj) {
  var sources = Array.prototype.slice.call(arguments, 1);

  if (isObject(obj) && sources.length > 0) {
    sources.forEach(function (source) {
      if (isObject(source)) {
        Object.keys(source).forEach(function (key) {
          obj[key] = source[key];
        });
      }
    });
  }

  return obj;
};

// Matches a decimal expansion with twelve consecutive 0s or 9s — the
// signature of binary floating-point round-off (e.g. 0.30000000000000004).
var REGEXP_DECIMALS = /\.\d*(?:0|9){12}\d*$/i;

/**
 * Normalize a decimal number by rounding away floating-point round-off.
 * Check out {@link http://0.30000000000000004.com/}
 * @param {number} value - The value to normalize.
 * @param {number} [times=100000000000] - The times for normalizing.
 * @returns {number} Returns the normalized number.
 */
function normalizeDecimalNumber(value) {
  var times = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 100000000000;

  return REGEXP_DECIMALS.test(value) ? Math.round(value * times) / times : value;
}

// Style properties whose numeric values must be suffixed with "px".
var REGEXP_SUFFIX = /^(?:width|height|left|top|marginLeft|marginTop)$/;

/**
 * Apply styles to the given element.
 * @param {Element} element - The target element.
 * @param {Object} styles - The styles for applying.
 */
function setStyle(element, styles) {
  var style = element.style;

  forEach(styles, function (value, property) {
    if (REGEXP_SUFFIX.test(property) && isNumber(value)) {
      value += 'px';
    }

    style[property] = value;
  });
}

/**
 * Check if the given element has a special class.
 * @param {Element} element - The element to check.
 * @param {string} value - The class to search.
 * @returns {boolean} Returns `true` if the special class was found.
 */
function hasClass(element, value) {
  if (element.classList) {
    return element.classList.contains(value);
  }

  return element.className.indexOf(value) > -1;
}
/**
 * Add classes to the given element (or to each element of an array-like).
 * @param {Element} element - The target element.
 * @param {string} value - The classes to be added.
 */
function addClass(element, value) {
  if (!value) {
    return;
  }

  // Array-like of elements: recurse per element.
  if (isNumber(element.length)) {
    forEach(element, function (node) {
      addClass(node, value);
    });
    return;
  }

  if (element.classList) {
    element.classList.add(value);
    return;
  }

  var className = element.className.trim();

  if (!className) {
    element.className = value;
  } else if (className.indexOf(value) < 0) {
    element.className = className + ' ' + value;
  }
}

/**
 * Remove classes from the given element (or from each element of an array-like).
 * @param {Element} element - The target element.
 * @param {string} value - The classes to be removed.
 */
function removeClass(element, value) {
  if (!value) {
    return;
  }

  if (isNumber(element.length)) {
    forEach(element, function (node) {
      removeClass(node, value);
    });
    return;
  }

  if (element.classList) {
    element.classList.remove(value);
    return;
  }

  if (element.className.indexOf(value) >= 0) {
    element.className = element.className.replace(value, '');
  }
}

/**
 * Add or remove classes from the given element.
 * @param {Element} element - The target element.
 * @param {string} value - The classes to be toggled.
 * @param {boolean} added - Add only.
 */
function toggleClass(element, value, added) {
  if (!value) {
    return;
  }

  if (isNumber(element.length)) {
    forEach(element, function (node) {
      toggleClass(node, value, added);
    });
    return;
  }

  // IE10-11 doesn't support the second parameter of `classList.toggle`,
  // so delegate to addClass/removeClass instead.
  if (added) {
    addClass(element, value);
  } else {
    removeClass(element, value);
  }
}

// Lower/digit followed by an upper-case letter — the camelCase hump boundary.
var REGEXP_HYPHENATE = /([a-z\d])([A-Z])/g;

/**
 * Transform the given string from camelCase to kebab-case.
 * @param {string} value - The value to transform.
 * @returns {string} The transformed value.
 */
function hyphenate(value) {
  return value.replace(REGEXP_HYPHENATE, '$1-$2').toLowerCase();
}
/**
 * Get data from the given element: prefer a directly stored object property,
 * then `dataset`, then the `data-*` attribute.
 * @param {Element} element - The target element.
 * @param {string} name - The data key to get.
 * @returns {string} The data value.
 */
function getData(element, name) {
  if (isObject(element[name])) {
    return element[name];
  }

  if (element.dataset) {
    return element.dataset[name];
  }

  return element.getAttribute('data-' + hyphenate(name));
}

/**
 * Set data on the given element (objects are stored as direct properties so
 * they survive unstringified; primitives go through `dataset`/attributes).
 * @param {Element} element - The target element.
 * @param {string} name - The data key to set.
 * @param {string} data - The data value.
 */
function setData(element, name, data) {
  if (isObject(data)) {
    element[name] = data;
  } else if (element.dataset) {
    element.dataset[name] = data;
  } else {
    element.setAttribute('data-' + hyphenate(name), data);
  }
}

/**
 * Remove data from the given element.
 * @param {Element} element - The target element.
 * @param {string} name - The data key to remove.
 */
function removeData(element, name) {
  if (isObject(element[name])) {
    try {
      delete element[name];
    } catch (e) {
      element[name] = undefined;
    }
  } else if (element.dataset) {
    // #128 Safari does not allow deleting a dataset property.
    try {
      delete element.dataset[name];
    } catch (e) {
      element.dataset[name] = undefined;
    }
  } else {
    element.removeAttribute('data-' + hyphenate(name));
  }
}

var REGEXP_SPACES = /\s\s*/;

// Feature-detect support for the `once` listener option by registering a
// throwaway listener with a getter-instrumented options object.
var onceSupported = function () {
  var supported = false;

  if (IN_BROWSER) {
    var once = false;
    var listener = function listener() {};
    var options = Object.defineProperty({}, 'once', {
      get: function get$$1() {
        supported = true;
        return once;
      },

      /**
       * This setter can fix a `TypeError` in strict mode
       * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Getter_only}
       * @param {boolean} value - The value to set
       */
      set: function set$$1(value) {
        once = value;
      }
    });

    WINDOW.addEventListener('test', listener, options);
    WINDOW.removeEventListener('test', listener, options);
  }

  return supported;
}();

/**
 * Remove an event listener from the target element for each whitespace-
 * separated event type, resolving the `once`-emulation wrapper if one was
 * registered by `addListener`.
 * @param {Element} element - The event target.
 * @param {string} type - The event type(s).
 * @param {Function} listener - The event listener.
 * @param {Object} options - The event options.
 */
function removeListener(element, type, listener) {
  var options = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};

  var handler = listener;

  type.trim().split(REGEXP_SPACES).forEach(function (event) {
    if (!onceSupported) {
      var listeners = element.listeners;

      if (listeners && listeners[event] && listeners[event][listener]) {
        // The registered handler is the wrapper, not the raw listener.
        handler = listeners[event][listener];
        delete listeners[event][listener];

        if (Object.keys(listeners[event]).length === 0) {
          delete listeners[event];
        }

        if (Object.keys(listeners).length === 0) {
          delete element.listeners;
        }
      }
    }

    element.removeEventListener(event, handler, options);
  });
}

/**
 * Add an event listener to the target element for each whitespace-separated
 * event type, emulating the `once` option (self-removing wrapper stored on
 * `element.listeners`) when the browser lacks native support.
 * @param {Element} element - The event target.
 * @param {string} type - The event type(s).
 * @param {Function} listener - The event listener.
 * @param {Object} options - The event options.
 */
function addListener(element, type, listener) {
  var options = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};

  var _handler = listener;

  type.trim().split(REGEXP_SPACES).forEach(function (event) {
    if (options.once && !onceSupported) {
      var listeners = element.listeners === undefined ? {} : element.listeners;

      _handler = function handler() {
        for (var _len2 = arguments.length, args = Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
          args[_key2] = arguments[_key2];
        }

        // Fire once: unregister before delegating to the raw listener.
        delete listeners[event][listener];
        element.removeEventListener(event, _handler, options);
        listener.apply(element, args);
      };

      if (!listeners[event]) {
        listeners[event] = {};
      }

      // Re-registering the same listener replaces the previous wrapper.
      if (listeners[event][listener]) {
        element.removeEventListener(event, listeners[event][listener], options);
      }

      listeners[event][listener] = _handler;
      element.listeners = listeners;
    }

    element.addEventListener(event, _handler, options);
  });
}

/**
 * Dispatch a cancelable, bubbling custom event on the target element.
 * @param {Element} element - The event target.
 * @param {string} type - The event type(s).
 * @param {Object} data - The additional event data.
 * @returns {boolean} Indicate if the event is default prevented or not.
 */
function dispatchEvent(element, type, data) {
  var event = void 0;

  // Event and CustomEvent on IE9-11 are global objects, not constructors.
  if (isFunction(Event) && isFunction(CustomEvent)) {
    event = new CustomEvent(type, {
      detail: data,
      bubbles: true,
      cancelable: true
    });
  } else {
    event = document.createEvent('CustomEvent');
    event.initCustomEvent(type, true, true, data);
  }

  return element.dispatchEvent(event);
}

/**
 * Get the offset of the element relative to the document.
 * @param {Element} element - The target element.
 * @returns {Object} The offset data.
 */
function getOffset(element) {
  var box = element.getBoundingClientRect();

  return {
    left: box.left + (window.pageXOffset - document.documentElement.clientLeft),
    top: box.top + (window.pageYOffset - document.documentElement.clientTop)
  };
}

var location = WINDOW.location;

// Captures protocol, hostname and (optional) port of an absolute http(s) URL.
var REGEXP_ORIGINS = /^(https?:)\/\/([^:/?#]+):?(\d*)/i;
/**
 * Check if the given URL is a cross origin URL.
 * @param {string} url - The target URL.
 * @returns {boolean} Returns `true` if the given URL is a cross origin URL, else `false`
 *     (`null` when the URL is not absolute http/https, matching the original short-circuit).
 */
function isCrossOriginURL(url) {
  var matched = url.match(REGEXP_ORIGINS);

  if (!matched) {
    return matched;
  }

  return matched[1] !== location.protocol || matched[2] !== location.hostname || matched[3] !== location.port;
}

/**
 * Add a cache-busting timestamp query parameter to the given URL.
 * @param {string} url - The target URL.
 * @returns {string} The result URL.
 */
function addTimestamp(url) {
  var stamp = 'timestamp=' + new Date().getTime();
  var joiner = url.indexOf('?') === -1 ? '?' : '&';

  return url + joiner + stamp;
}

/**
 * Build cross-browser CSS transform declarations from the given data.
 * @param {Object} obj - The target object.
 * @returns {string} A string contains transform values.
 */
function getTransforms(_ref) {
  var rotate = _ref.rotate,
      scaleX = _ref.scaleX,
      scaleY = _ref.scaleY,
      translateX = _ref.translateX,
      translateY = _ref.translateY;

  var parts = [];

  if (isNumber(translateX) && translateX !== 0) {
    parts.push('translateX(' + translateX + 'px)');
  }

  if (isNumber(translateY) && translateY !== 0) {
    parts.push('translateY(' + translateY + 'px)');
  }

  // Rotate must precede scale to match the Exif orientation transform.
  if (isNumber(rotate) && rotate !== 0) {
    parts.push('rotate(' + rotate + 'deg)');
  }

  if (isNumber(scaleX) && scaleX !== 1) {
    parts.push('scaleX(' + scaleX + ')');
  }

  if (isNumber(scaleY) && scaleY !== 1) {
    parts.push('scaleY(' + scaleY + ')');
  }

  var transform = parts.length ? parts.join(' ') : 'none';

  return {
    WebkitTransform: transform,
    msTransform: transform,
    transform: transform
  };
}
/**
 * Get the max ratio of a group of pointers (pinch-zoom magnitude).
 * @param {Object} pointers - The target pointers, keyed by pointer id, each with
 *     `startX`/`startY`/`endX`/`endY` page coordinates.
 * @returns {number} The ratio with the largest absolute value among all pointer
 *     pairs (`undefined` when fewer than two pointers are given).
 */
function getMaxZoomRatio(pointers) {
  var pointers2 = assign({}, pointers);
  var ratios = [];

  forEach(pointers, function (pointer, pointerId) {
    // Drop the current pointer so each unordered pair is visited once.
    delete pointers2[pointerId];

    forEach(pointers2, function (pointer2) {
      var x1 = Math.abs(pointer.startX - pointer2.startX);
      var y1 = Math.abs(pointer.startY - pointer2.startY);
      var x2 = Math.abs(pointer.endX - pointer2.endX);
      var y2 = Math.abs(pointer.endY - pointer2.endY);
      var z1 = Math.sqrt(x1 * x1 + y1 * y1);
      var z2 = Math.sqrt(x2 * x2 + y2 * y2);
      var ratio = (z2 - z1) / z1;

      ratios.push(ratio);
    });
  });

  // BUG FIX: the original comparator returned the boolean
  // `Math.abs(a) < Math.abs(b)`, which `Array.prototype.sort` coerces to 0/1 —
  // never negative — so the resulting order was engine-dependent and
  // `ratios[0]` was not guaranteed to be the max ratio. Sort numerically by
  // descending absolute value so index 0 holds the largest-magnitude ratio.
  ratios.sort(function (a, b) {
    return Math.abs(b) - Math.abs(a);
  });

  return ratios[0];
}

/**
 * Get a pointer from an event object.
 * @param {Object} event - The target event object.
 * @param {boolean} endOnly - Indicates if only returns the end point coordinate or not.
 * @returns {Object} The result pointer contains start and/or end point coordinates.
 */
function getPointer(_ref2, endOnly) {
  var pageX = _ref2.pageX,
      pageY = _ref2.pageY;

  var end = {
    endX: pageX,
    endY: pageY
  };

  return endOnly ? end : assign({
    startX: pageX,
    startY: pageY
  }, end);
}

/**
 * Get the center point coordinate of a group of pointers.
 * @param {Object} pointers - The target pointers.
 * @returns {Object} The center point coordinate (arithmetic mean of start points).
 */
function getPointersCenter(pointers) {
  var pageX = 0;
  var pageY = 0;
  var count = 0;

  forEach(pointers, function (_ref3) {
    var startX = _ref3.startX,
        startY = _ref3.startY;

    pageX += startX;
    pageY += startY;
    count += 1;
  });

  pageX /= count;
  pageY /= count;

  return {
    pageX: pageX,
    pageY: pageY
  };
}

/**
 * Check if the given value is a finite number.
 */
var isFinite = Number.isFinite || WINDOW.isFinite;
/**
 * Get the max sizes in a rectangle under the given aspect ratio.
 * @param {Object} data - The original sizes (`aspectRatio`, `width`, `height`).
 * @param {string} [type='contain'] - The adjust type, 'contain' or 'cover'.
 * @returns {Object} The result sizes.
 */
function getAdjustedSizes(_ref4) {
  var aspectRatio = _ref4.aspectRatio,
      height = _ref4.height,
      width = _ref4.width;
  var type = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'contain';

  var isPositiveNumber = function isPositiveNumber(value) {
    return isFinite(value) && value > 0;
  };

  if (isPositiveNumber(width) && isPositiveNumber(height)) {
    // Both sides are given: shrink one side to honor the aspect ratio.
    var adjustedWidth = height * aspectRatio;

    if (type === 'contain' && adjustedWidth > width || type === 'cover' && adjustedWidth < width) {
      height = width / aspectRatio;
    } else {
      width = height * aspectRatio;
    }
  } else if (isPositiveNumber(width)) {
    height = width / aspectRatio;
  } else if (isPositiveNumber(height)) {
    width = height * aspectRatio;
  }

  return {
    width: width,
    height: height
  };
}

/**
 * Get the new sizes of a rectangle after rotated.
 * @param {Object} data - The original sizes (`width`, `height`, `degree`).
 * @returns {Object} The result sizes (bounding box of the rotated rectangle).
 */
function getRotatedSizes(_ref5) {
  var width = _ref5.width,
      height = _ref5.height,
      degree = _ref5.degree;

  degree = Math.abs(degree) % 180;

  // Quarter turn simply swaps the sides.
  if (degree === 90) {
    return {
      width: height,
      height: width
    };
  }

  var arc = degree % 90 * Math.PI / 180;
  var sinArc = Math.sin(arc);
  var cosArc = Math.cos(arc);
  var newWidth = width * cosArc + height * sinArc;
  var newHeight = width * sinArc + height * cosArc;

  if (degree > 90) {
    return {
      width: newHeight,
      height: newWidth
    };
  }

  return {
    width: newWidth,
    height: newHeight
  };
}
/**
 * Get a canvas which drew the given image.
 * @param {HTMLImageElement} image - The image for drawing.
 * @param {Object} imageData - The image data (aspectRatio, naturalWidth,
 *     naturalHeight, and optional rotate/scaleX/scaleY).
 * @param {Object} canvasData - The canvas data (aspectRatio, naturalWidth, naturalHeight).
 * @param {Object} options - The options (fillColor, imageSmoothing*, min/max sizes).
 * @returns {HTMLCanvasElement} The result canvas.
 */
function getSourceCanvas(image, imageData, canvasData, options) {
  var imageAspectRatio = imageData.aspectRatio;
  var imageNaturalWidth = imageData.naturalWidth;
  var imageNaturalHeight = imageData.naturalHeight;
  var rotate = imageData.rotate === undefined ? 0 : imageData.rotate;
  var scaleX = imageData.scaleX === undefined ? 1 : imageData.scaleX;
  var scaleY = imageData.scaleY === undefined ? 1 : imageData.scaleY;

  var aspectRatio = canvasData.aspectRatio;
  var naturalWidth = canvasData.naturalWidth;
  var naturalHeight = canvasData.naturalHeight;

  var fillColor = options.fillColor === undefined ? 'transparent' : options.fillColor;
  var imageSmoothingEnabled = options.imageSmoothingEnabled === undefined ? true : options.imageSmoothingEnabled;
  var imageSmoothingQuality = options.imageSmoothingQuality === undefined ? 'low' : options.imageSmoothingQuality;
  var maxWidth = options.maxWidth === undefined ? Infinity : options.maxWidth;
  var maxHeight = options.maxHeight === undefined ? Infinity : options.maxHeight;
  var minWidth = options.minWidth === undefined ? 0 : options.minWidth;
  var minHeight = options.minHeight === undefined ? 0 : options.minHeight;

  var canvas = document.createElement('canvas');
  var context = canvas.getContext('2d');

  // Clamp the output canvas sizes between the min/max bounds, both adjusted
  // to the canvas aspect ratio.
  var maxSizes = getAdjustedSizes({
    aspectRatio: aspectRatio,
    width: maxWidth,
    height: maxHeight
  });
  var minSizes = getAdjustedSizes({
    aspectRatio: aspectRatio,
    width: minWidth,
    height: minHeight
  }, 'cover');
  var width = Math.min(maxSizes.width, Math.max(minSizes.width, naturalWidth));
  var height = Math.min(maxSizes.height, Math.max(minSizes.height, naturalHeight));

  // Note: should always use the image's natural sizes for drawing as
  // imageData.naturalWidth === canvasData.naturalHeight when rotate % 180 === 90.
  var destMaxSizes = getAdjustedSizes({
    aspectRatio: imageAspectRatio,
    width: maxWidth,
    height: maxHeight
  });
  var destMinSizes = getAdjustedSizes({
    aspectRatio: imageAspectRatio,
    width: minWidth,
    height: minHeight
  }, 'cover');
  var destWidth = Math.min(destMaxSizes.width, Math.max(destMinSizes.width, imageNaturalWidth));
  var destHeight = Math.min(destMaxSizes.height, Math.max(destMinSizes.height, imageNaturalHeight));

  // Destination rectangle centered on the (translated) origin.
  var params = [-destWidth / 2, -destHeight / 2, destWidth, destHeight];

  canvas.width = normalizeDecimalNumber(width);
  canvas.height = normalizeDecimalNumber(height);
  context.fillStyle = fillColor;
  context.fillRect(0, 0, width, height);
  context.save();
  context.translate(width / 2, height / 2);
  context.rotate(rotate * Math.PI / 180);
  context.scale(scaleX, scaleY);
  context.imageSmoothingEnabled = imageSmoothingEnabled;
  context.imageSmoothingQuality = imageSmoothingQuality;
  context.drawImage.apply(context, [image].concat(toConsumableArray(params.map(function (param) {
    return Math.floor(normalizeDecimalNumber(param));
  }))));
  context.restore();
  return canvas;
}

var fromCharCode = String.fromCharCode;
/**
 * Get string from char code in data view.
 * @param {DataView} dataView - The data view for read.
 * @param {number} start - The start index.
 * @param {number} length - The read length.
 * @returns {string} The read result.
 */
function getStringFromCharCode(dataView, start, length) {
  var str = '';
  var end = start + length;

  for (var i = start; i < end; i += 1) {
    str += fromCharCode(dataView.getUint8(i));
  }

  return str;
}

// Strips the "data:<mime>[;base64]," prefix of a Data URL.
var REGEXP_DATA_URL_HEAD = /^data:.*,/;

/**
 * Transform Data URL to array buffer.
 * @param {string} dataURL - The Data URL to transform.
 * @returns {ArrayBuffer} The result array buffer.
 */
function dataURLToArrayBuffer(dataURL) {
  var base64 = dataURL.replace(REGEXP_DATA_URL_HEAD, '');
  var binary = atob(base64);
  var buffer = new ArrayBuffer(binary.length);
  var view = new Uint8Array(buffer);

  forEach(view, function (value, i) {
    view[i] = binary.charCodeAt(i);
  });

  return buffer;
}

/**
 * Transform array buffer to Data URL.
 * @param {ArrayBuffer} arrayBuffer - The array buffer to transform.
 * @param {string} mimeType - The mime type of the Data URL.
 * @returns {string} The result Data URL.
 */
function arrayBufferToDataURL(arrayBuffer, mimeType) {
  var view = new Uint8Array(arrayBuffer);
  var data = '';

  // TypedArray.prototype.forEach is not supported in some browsers.
  forEach(view, function (value) {
    data += fromCharCode(value);
  });

  return 'data:' + mimeType + ';base64,' + btoa(data);
}
/**
 * Get orientation value from given array buffer, and reset the stored value
 * to 1 ("normal") in place so the image is not re-rotated downstream.
 * @param {ArrayBuffer} arrayBuffer - The array buffer to read.
 * @returns {number} The read orientation value (`undefined` when not found).
 */
function getOrientation(arrayBuffer) {
  var dataView = new DataView(arrayBuffer);
  var orientation = void 0;
  var littleEndian = void 0;
  var app1Start = void 0;
  var ifdStart = void 0;

  // Only handle JPEG streams, which start with the SOI marker 0xFFD8.
  if (dataView.getUint8(0) === 0xFF && dataView.getUint8(1) === 0xD8) {
    var length = dataView.byteLength;
    var offset = 2;

    // Scan for the APP1 marker (0xFFE1) that carries the Exif payload.
    while (offset < length) {
      if (dataView.getUint8(offset) === 0xFF && dataView.getUint8(offset + 1) === 0xE1) {
        app1Start = offset;
        break;
      }

      offset += 1;
    }
  }

  if (app1Start) {
    var exifIDCode = app1Start + 4;
    var tiffOffset = app1Start + 10;

    if (getStringFromCharCode(dataView, exifIDCode, 4) === 'Exif') {
      var endianness = dataView.getUint16(tiffOffset);

      littleEndian = endianness === 0x4949; // 'II' = Intel byte order

      if (littleEndian || endianness === 0x4D4D /* 'MM' = big endian */) {
        // 0x002A is the fixed TIFF magic number.
        if (dataView.getUint16(tiffOffset + 2, littleEndian) === 0x002A) {
          var firstIFDOffset = dataView.getUint32(tiffOffset + 4, littleEndian);

          if (firstIFDOffset >= 0x00000008) {
            ifdStart = tiffOffset + firstIFDOffset;
          }
        }
      }
    }
  }

  if (ifdStart) {
    var entryCount = dataView.getUint16(ifdStart, littleEndian);
    var entryOffset = void 0;

    // Each IFD entry is 12 bytes; the 2-byte count precedes them.
    for (var i = 0; i < entryCount; i += 1) {
      entryOffset = ifdStart + i * 12 + 2;

      if (dataView.getUint16(entryOffset, littleEndian) === 0x0112 /* Orientation */) {
        // 8 is the offset of the current tag's value field.
        entryOffset += 8;

        // Get the original orientation value.
        orientation = dataView.getUint16(entryOffset, littleEndian);

        // Override the orientation with its default value.
        dataView.setUint16(entryOffset, 1, littleEndian);
        break;
      }
    }
  }

  return orientation;
}
+ */ +function parseOrientation(orientation) { + var rotate = 0; + var scaleX = 1; + var scaleY = 1; + + switch (orientation) { + // Flip horizontal + case 2: + scaleX = -1; + break; + + // Rotate left 180° + case 3: + rotate = -180; + break; + + // Flip vertical + case 4: + scaleY = -1; + break; + + // Flip vertical and rotate right 90° + case 5: + rotate = 90; + scaleY = -1; + break; + + // Rotate right 90° + case 6: + rotate = 90; + break; + + // Flip horizontal and rotate right 90° + case 7: + rotate = 90; + scaleX = -1; + break; + + // Rotate left 90° + case 8: + rotate = -90; + break; + + default: + } + + return { + rotate: rotate, + scaleX: scaleX, + scaleY: scaleY + }; +} + +var render = { + render: function render() { + this.initContainer(); + this.initCanvas(); + this.initCropBox(); + this.renderCanvas(); + + if (this.cropped) { + this.renderCropBox(); + } + }, + initContainer: function initContainer() { + var element = this.element, + options = this.options, + container = this.container, + cropper = this.cropper; + + + addClass(cropper, CLASS_HIDDEN); + removeClass(element, CLASS_HIDDEN); + + var containerData = { + width: Math.max(container.offsetWidth, Number(options.minContainerWidth) || 200), + height: Math.max(container.offsetHeight, Number(options.minContainerHeight) || 100) + }; + + this.containerData = containerData; + + setStyle(cropper, { + width: containerData.width, + height: containerData.height + }); + + addClass(element, CLASS_HIDDEN); + removeClass(cropper, CLASS_HIDDEN); + }, + + + // Canvas (image wrapper) + initCanvas: function initCanvas() { + var containerData = this.containerData, + imageData = this.imageData; + var viewMode = this.options.viewMode; + + var rotated = Math.abs(imageData.rotate) % 180 === 90; + var naturalWidth = rotated ? imageData.naturalHeight : imageData.naturalWidth; + var naturalHeight = rotated ? 
imageData.naturalWidth : imageData.naturalHeight; + var aspectRatio = naturalWidth / naturalHeight; + var canvasWidth = containerData.width; + var canvasHeight = containerData.height; + + if (containerData.height * aspectRatio > containerData.width) { + if (viewMode === 3) { + canvasWidth = containerData.height * aspectRatio; + } else { + canvasHeight = containerData.width / aspectRatio; + } + } else if (viewMode === 3) { + canvasHeight = containerData.width / aspectRatio; + } else { + canvasWidth = containerData.height * aspectRatio; + } + + var canvasData = { + aspectRatio: aspectRatio, + naturalWidth: naturalWidth, + naturalHeight: naturalHeight, + width: canvasWidth, + height: canvasHeight + }; + + canvasData.left = (containerData.width - canvasWidth) / 2; + canvasData.top = (containerData.height - canvasHeight) / 2; + canvasData.oldLeft = canvasData.left; + canvasData.oldTop = canvasData.top; + + this.canvasData = canvasData; + this.limited = viewMode === 1 || viewMode === 2; + this.limitCanvas(true, true); + this.initialImageData = assign({}, imageData); + this.initialCanvasData = assign({}, canvasData); + }, + limitCanvas: function limitCanvas(sizeLimited, positionLimited) { + var options = this.options, + containerData = this.containerData, + canvasData = this.canvasData, + cropBoxData = this.cropBoxData; + var viewMode = options.viewMode; + var aspectRatio = canvasData.aspectRatio; + + var cropped = this.cropped && cropBoxData; + + if (sizeLimited) { + var minCanvasWidth = Number(options.minCanvasWidth) || 0; + var minCanvasHeight = Number(options.minCanvasHeight) || 0; + + if (viewMode > 1) { + minCanvasWidth = Math.max(minCanvasWidth, containerData.width); + minCanvasHeight = Math.max(minCanvasHeight, containerData.height); + + if (viewMode === 3) { + if (minCanvasHeight * aspectRatio > minCanvasWidth) { + minCanvasWidth = minCanvasHeight * aspectRatio; + } else { + minCanvasHeight = minCanvasWidth / aspectRatio; + } + } + } else if (viewMode > 0) { + if 
(minCanvasWidth) { + minCanvasWidth = Math.max(minCanvasWidth, cropped ? cropBoxData.width : 0); + } else if (minCanvasHeight) { + minCanvasHeight = Math.max(minCanvasHeight, cropped ? cropBoxData.height : 0); + } else if (cropped) { + minCanvasWidth = cropBoxData.width; + minCanvasHeight = cropBoxData.height; + + if (minCanvasHeight * aspectRatio > minCanvasWidth) { + minCanvasWidth = minCanvasHeight * aspectRatio; + } else { + minCanvasHeight = minCanvasWidth / aspectRatio; + } + } + } + + var _getAdjustedSizes = getAdjustedSizes({ + aspectRatio: aspectRatio, + width: minCanvasWidth, + height: minCanvasHeight + }); + + minCanvasWidth = _getAdjustedSizes.width; + minCanvasHeight = _getAdjustedSizes.height; + + + canvasData.minWidth = minCanvasWidth; + canvasData.minHeight = minCanvasHeight; + canvasData.maxWidth = Infinity; + canvasData.maxHeight = Infinity; + } + + if (positionLimited) { + if (viewMode) { + var newCanvasLeft = containerData.width - canvasData.width; + var newCanvasTop = containerData.height - canvasData.height; + + canvasData.minLeft = Math.min(0, newCanvasLeft); + canvasData.minTop = Math.min(0, newCanvasTop); + canvasData.maxLeft = Math.max(0, newCanvasLeft); + canvasData.maxTop = Math.max(0, newCanvasTop); + + if (cropped && this.limited) { + canvasData.minLeft = Math.min(cropBoxData.left, cropBoxData.left + (cropBoxData.width - canvasData.width)); + canvasData.minTop = Math.min(cropBoxData.top, cropBoxData.top + (cropBoxData.height - canvasData.height)); + canvasData.maxLeft = cropBoxData.left; + canvasData.maxTop = cropBoxData.top; + + if (viewMode === 2) { + if (canvasData.width >= containerData.width) { + canvasData.minLeft = Math.min(0, newCanvasLeft); + canvasData.maxLeft = Math.max(0, newCanvasLeft); + } + + if (canvasData.height >= containerData.height) { + canvasData.minTop = Math.min(0, newCanvasTop); + canvasData.maxTop = Math.max(0, newCanvasTop); + } + } + } + } else { + canvasData.minLeft = -canvasData.width; + canvasData.minTop 
= -canvasData.height; + canvasData.maxLeft = containerData.width; + canvasData.maxTop = containerData.height; + } + } + }, + renderCanvas: function renderCanvas(changed, transformed) { + var canvasData = this.canvasData, + imageData = this.imageData; + + + if (transformed) { + var _getRotatedSizes = getRotatedSizes({ + width: imageData.naturalWidth * Math.abs(imageData.scaleX || 1), + height: imageData.naturalHeight * Math.abs(imageData.scaleY || 1), + degree: imageData.rotate || 0 + }), + naturalWidth = _getRotatedSizes.width, + naturalHeight = _getRotatedSizes.height; + + var width = canvasData.width * (naturalWidth / canvasData.naturalWidth); + var height = canvasData.height * (naturalHeight / canvasData.naturalHeight); + + canvasData.left -= (width - canvasData.width) / 2; + canvasData.top -= (height - canvasData.height) / 2; + canvasData.width = width; + canvasData.height = height; + canvasData.aspectRatio = naturalWidth / naturalHeight; + canvasData.naturalWidth = naturalWidth; + canvasData.naturalHeight = naturalHeight; + this.limitCanvas(true, false); + } + + if (canvasData.width > canvasData.maxWidth || canvasData.width < canvasData.minWidth) { + canvasData.left = canvasData.oldLeft; + } + + if (canvasData.height > canvasData.maxHeight || canvasData.height < canvasData.minHeight) { + canvasData.top = canvasData.oldTop; + } + + canvasData.width = Math.min(Math.max(canvasData.width, canvasData.minWidth), canvasData.maxWidth); + canvasData.height = Math.min(Math.max(canvasData.height, canvasData.minHeight), canvasData.maxHeight); + + this.limitCanvas(false, true); + + canvasData.left = Math.min(Math.max(canvasData.left, canvasData.minLeft), canvasData.maxLeft); + canvasData.top = Math.min(Math.max(canvasData.top, canvasData.minTop), canvasData.maxTop); + canvasData.oldLeft = canvasData.left; + canvasData.oldTop = canvasData.top; + + setStyle(this.canvas, assign({ + width: canvasData.width, + height: canvasData.height + }, getTransforms({ + translateX: 
canvasData.left, + translateY: canvasData.top + }))); + + this.renderImage(changed); + + if (this.cropped && this.limited) { + this.limitCropBox(true, true); + } + }, + renderImage: function renderImage(changed) { + var canvasData = this.canvasData, + imageData = this.imageData; + + var width = imageData.naturalWidth * (canvasData.width / canvasData.naturalWidth); + var height = imageData.naturalHeight * (canvasData.height / canvasData.naturalHeight); + + assign(imageData, { + width: width, + height: height, + left: (canvasData.width - width) / 2, + top: (canvasData.height - height) / 2 + }); + setStyle(this.image, assign({ + width: imageData.width, + height: imageData.height + }, getTransforms(assign({ + translateX: imageData.left, + translateY: imageData.top + }, imageData)))); + + if (changed) { + this.output(); + } + }, + initCropBox: function initCropBox() { + var options = this.options, + canvasData = this.canvasData; + var aspectRatio = options.aspectRatio; + + var autoCropArea = Number(options.autoCropArea) || 0.8; + var cropBoxData = { + width: canvasData.width, + height: canvasData.height + }; + + if (aspectRatio) { + if (canvasData.height * aspectRatio > canvasData.width) { + cropBoxData.height = cropBoxData.width / aspectRatio; + } else { + cropBoxData.width = cropBoxData.height * aspectRatio; + } + } + + this.cropBoxData = cropBoxData; + this.limitCropBox(true, true); + + // Initialize auto crop area + cropBoxData.width = Math.min(Math.max(cropBoxData.width, cropBoxData.minWidth), cropBoxData.maxWidth); + cropBoxData.height = Math.min(Math.max(cropBoxData.height, cropBoxData.minHeight), cropBoxData.maxHeight); + + // The width/height of auto crop area must large than "minWidth/Height" + cropBoxData.width = Math.max(cropBoxData.minWidth, cropBoxData.width * autoCropArea); + cropBoxData.height = Math.max(cropBoxData.minHeight, cropBoxData.height * autoCropArea); + cropBoxData.left = canvasData.left + (canvasData.width - cropBoxData.width) / 2; + 
cropBoxData.top = canvasData.top + (canvasData.height - cropBoxData.height) / 2; + cropBoxData.oldLeft = cropBoxData.left; + cropBoxData.oldTop = cropBoxData.top; + + this.initialCropBoxData = assign({}, cropBoxData); + }, + limitCropBox: function limitCropBox(sizeLimited, positionLimited) { + var options = this.options, + containerData = this.containerData, + canvasData = this.canvasData, + cropBoxData = this.cropBoxData, + limited = this.limited; + var aspectRatio = options.aspectRatio; + + + if (sizeLimited) { + var minCropBoxWidth = Number(options.minCropBoxWidth) || 0; + var minCropBoxHeight = Number(options.minCropBoxHeight) || 0; + var maxCropBoxWidth = Math.min(containerData.width, limited ? canvasData.width : containerData.width); + var maxCropBoxHeight = Math.min(containerData.height, limited ? canvasData.height : containerData.height); + + // The min/maxCropBoxWidth/Height must be less than container's width/height + minCropBoxWidth = Math.min(minCropBoxWidth, containerData.width); + minCropBoxHeight = Math.min(minCropBoxHeight, containerData.height); + + if (aspectRatio) { + if (minCropBoxWidth && minCropBoxHeight) { + if (minCropBoxHeight * aspectRatio > minCropBoxWidth) { + minCropBoxHeight = minCropBoxWidth / aspectRatio; + } else { + minCropBoxWidth = minCropBoxHeight * aspectRatio; + } + } else if (minCropBoxWidth) { + minCropBoxHeight = minCropBoxWidth / aspectRatio; + } else if (minCropBoxHeight) { + minCropBoxWidth = minCropBoxHeight * aspectRatio; + } + + if (maxCropBoxHeight * aspectRatio > maxCropBoxWidth) { + maxCropBoxHeight = maxCropBoxWidth / aspectRatio; + } else { + maxCropBoxWidth = maxCropBoxHeight * aspectRatio; + } + } + + // The minWidth/Height must be less than maxWidth/Height + cropBoxData.minWidth = Math.min(minCropBoxWidth, maxCropBoxWidth); + cropBoxData.minHeight = Math.min(minCropBoxHeight, maxCropBoxHeight); + cropBoxData.maxWidth = maxCropBoxWidth; + cropBoxData.maxHeight = maxCropBoxHeight; + } + + if (positionLimited) { 
+ if (limited) { + cropBoxData.minLeft = Math.max(0, canvasData.left); + cropBoxData.minTop = Math.max(0, canvasData.top); + cropBoxData.maxLeft = Math.min(containerData.width, canvasData.left + canvasData.width) - cropBoxData.width; + cropBoxData.maxTop = Math.min(containerData.height, canvasData.top + canvasData.height) - cropBoxData.height; + } else { + cropBoxData.minLeft = 0; + cropBoxData.minTop = 0; + cropBoxData.maxLeft = containerData.width - cropBoxData.width; + cropBoxData.maxTop = containerData.height - cropBoxData.height; + } + } + }, + renderCropBox: function renderCropBox() { + var options = this.options, + containerData = this.containerData, + cropBoxData = this.cropBoxData; + + + if (cropBoxData.width > cropBoxData.maxWidth || cropBoxData.width < cropBoxData.minWidth) { + cropBoxData.left = cropBoxData.oldLeft; + } + + if (cropBoxData.height > cropBoxData.maxHeight || cropBoxData.height < cropBoxData.minHeight) { + cropBoxData.top = cropBoxData.oldTop; + } + + cropBoxData.width = Math.min(Math.max(cropBoxData.width, cropBoxData.minWidth), cropBoxData.maxWidth); + cropBoxData.height = Math.min(Math.max(cropBoxData.height, cropBoxData.minHeight), cropBoxData.maxHeight); + + this.limitCropBox(false, true); + + cropBoxData.left = Math.min(Math.max(cropBoxData.left, cropBoxData.minLeft), cropBoxData.maxLeft); + cropBoxData.top = Math.min(Math.max(cropBoxData.top, cropBoxData.minTop), cropBoxData.maxTop); + cropBoxData.oldLeft = cropBoxData.left; + cropBoxData.oldTop = cropBoxData.top; + + if (options.movable && options.cropBoxMovable) { + // Turn to move the canvas when the crop box is equal to the container + setData(this.face, DATA_ACTION, cropBoxData.width >= containerData.width && cropBoxData.height >= containerData.height ? 
ACTION_MOVE : ACTION_ALL); + } + + setStyle(this.cropBox, assign({ + width: cropBoxData.width, + height: cropBoxData.height + }, getTransforms({ + translateX: cropBoxData.left, + translateY: cropBoxData.top + }))); + + if (this.cropped && this.limited) { + this.limitCanvas(true, true); + } + + if (!this.disabled) { + this.output(); + } + }, + output: function output() { + this.preview(); + dispatchEvent(this.element, EVENT_CROP, this.getData()); + } +}; + +var preview = { + initPreview: function initPreview() { + var crossOrigin = this.crossOrigin; + var preview = this.options.preview; + + var url = crossOrigin ? this.crossOriginUrl : this.url; + var image = document.createElement('img'); + + if (crossOrigin) { + image.crossOrigin = crossOrigin; + } + + image.src = url; + this.viewBox.appendChild(image); + this.viewBoxImage = image; + + if (!preview) { + return; + } + + var previews = preview; + + if (typeof preview === 'string') { + previews = this.element.ownerDocument.querySelectorAll(preview); + } else if (preview.querySelector) { + previews = [preview]; + } + + this.previews = previews; + + forEach(previews, function (el) { + var img = document.createElement('img'); + + // Save the original size for recover + setData(el, DATA_PREVIEW, { + width: el.offsetWidth, + height: el.offsetHeight, + html: el.innerHTML + }); + + if (crossOrigin) { + img.crossOrigin = crossOrigin; + } + + img.src = url; + + /** + * Override img element styles + * Add `display:block` to avoid margin top issue + * Add `height:auto` to override `height` attribute on IE8 + * (Occur only when margin-top <= -height) + */ + img.style.cssText = 'display:block;' + 'width:100%;' + 'height:auto;' + 'min-width:0!important;' + 'min-height:0!important;' + 'max-width:none!important;' + 'max-height:none!important;' + 'image-orientation:0deg!important;"'; + + el.innerHTML = ''; + el.appendChild(img); + }); + }, + resetPreview: function resetPreview() { + forEach(this.previews, function (element) { + var 
data = getData(element, DATA_PREVIEW); + + setStyle(element, { + width: data.width, + height: data.height + }); + + element.innerHTML = data.html; + removeData(element, DATA_PREVIEW); + }); + }, + preview: function preview() { + var imageData = this.imageData, + canvasData = this.canvasData, + cropBoxData = this.cropBoxData; + var cropBoxWidth = cropBoxData.width, + cropBoxHeight = cropBoxData.height; + var width = imageData.width, + height = imageData.height; + + var left = cropBoxData.left - canvasData.left - imageData.left; + var top = cropBoxData.top - canvasData.top - imageData.top; + + if (!this.cropped || this.disabled) { + return; + } + + setStyle(this.viewBoxImage, assign({ + width: width, + height: height + }, getTransforms(assign({ + translateX: -left, + translateY: -top + }, imageData)))); + + forEach(this.previews, function (element) { + var data = getData(element, DATA_PREVIEW); + var originalWidth = data.width; + var originalHeight = data.height; + var newWidth = originalWidth; + var newHeight = originalHeight; + var ratio = 1; + + if (cropBoxWidth) { + ratio = originalWidth / cropBoxWidth; + newHeight = cropBoxHeight * ratio; + } + + if (cropBoxHeight && newHeight > originalHeight) { + ratio = originalHeight / cropBoxHeight; + newWidth = cropBoxWidth * ratio; + newHeight = originalHeight; + } + + setStyle(element, { + width: newWidth, + height: newHeight + }); + + setStyle(element.getElementsByTagName('img')[0], assign({ + width: width * ratio, + height: height * ratio + }, getTransforms(assign({ + translateX: -left * ratio, + translateY: -top * ratio + }, imageData)))); + }); + } +}; + +var events = { + bind: function bind() { + var element = this.element, + options = this.options, + cropper = this.cropper; + + + if (isFunction(options.cropstart)) { + addListener(element, EVENT_CROP_START, options.cropstart); + } + + if (isFunction(options.cropmove)) { + addListener(element, EVENT_CROP_MOVE, options.cropmove); + } + + if 
(isFunction(options.cropend)) { + addListener(element, EVENT_CROP_END, options.cropend); + } + + if (isFunction(options.crop)) { + addListener(element, EVENT_CROP, options.crop); + } + + if (isFunction(options.zoom)) { + addListener(element, EVENT_ZOOM, options.zoom); + } + + addListener(cropper, EVENT_POINTER_DOWN, this.onCropStart = this.cropStart.bind(this)); + + if (options.zoomable && options.zoomOnWheel) { + addListener(cropper, EVENT_WHEEL, this.onWheel = this.wheel.bind(this)); + } + + if (options.toggleDragModeOnDblclick) { + addListener(cropper, EVENT_DBLCLICK, this.onDblclick = this.dblclick.bind(this)); + } + + addListener(element.ownerDocument, EVENT_POINTER_MOVE, this.onCropMove = this.cropMove.bind(this)); + addListener(element.ownerDocument, EVENT_POINTER_UP, this.onCropEnd = this.cropEnd.bind(this)); + + if (options.responsive) { + addListener(window, EVENT_RESIZE, this.onResize = this.resize.bind(this)); + } + }, + unbind: function unbind() { + var element = this.element, + options = this.options, + cropper = this.cropper; + + + if (isFunction(options.cropstart)) { + removeListener(element, EVENT_CROP_START, options.cropstart); + } + + if (isFunction(options.cropmove)) { + removeListener(element, EVENT_CROP_MOVE, options.cropmove); + } + + if (isFunction(options.cropend)) { + removeListener(element, EVENT_CROP_END, options.cropend); + } + + if (isFunction(options.crop)) { + removeListener(element, EVENT_CROP, options.crop); + } + + if (isFunction(options.zoom)) { + removeListener(element, EVENT_ZOOM, options.zoom); + } + + removeListener(cropper, EVENT_POINTER_DOWN, this.onCropStart); + + if (options.zoomable && options.zoomOnWheel) { + removeListener(cropper, EVENT_WHEEL, this.onWheel); + } + + if (options.toggleDragModeOnDblclick) { + removeListener(cropper, EVENT_DBLCLICK, this.onDblclick); + } + + removeListener(element.ownerDocument, EVENT_POINTER_MOVE, this.onCropMove); + removeListener(element.ownerDocument, EVENT_POINTER_UP, 
this.onCropEnd); + + if (options.responsive) { + removeListener(window, EVENT_RESIZE, this.onResize); + } + } +}; + +var handlers = { + resize: function resize() { + var options = this.options, + container = this.container, + containerData = this.containerData; + + var minContainerWidth = Number(options.minContainerWidth) || 200; + var minContainerHeight = Number(options.minContainerHeight) || 100; + + if (this.disabled || containerData.width <= minContainerWidth || containerData.height <= minContainerHeight) { + return; + } + + var ratio = container.offsetWidth / containerData.width; + + // Resize when width changed or height changed + if (ratio !== 1 || container.offsetHeight !== containerData.height) { + var canvasData = void 0; + var cropBoxData = void 0; + + if (options.restore) { + canvasData = this.getCanvasData(); + cropBoxData = this.getCropBoxData(); + } + + this.render(); + + if (options.restore) { + this.setCanvasData(forEach(canvasData, function (n, i) { + canvasData[i] = n * ratio; + })); + this.setCropBoxData(forEach(cropBoxData, function (n, i) { + cropBoxData[i] = n * ratio; + })); + } + } + }, + dblclick: function dblclick() { + if (this.disabled || this.options.dragMode === DRAG_MODE_NONE) { + return; + } + + this.setDragMode(hasClass(this.dragBox, CLASS_CROP) ? DRAG_MODE_MOVE : DRAG_MODE_CROP); + }, + wheel: function wheel(e) { + var _this = this; + + var ratio = Number(this.options.wheelZoomRatio) || 0.1; + var delta = 1; + + if (this.disabled) { + return; + } + + e.preventDefault(); + + // Limit wheel speed to prevent zoom too fast (#21) + if (this.wheeling) { + return; + } + + this.wheeling = true; + + setTimeout(function () { + _this.wheeling = false; + }, 50); + + if (e.deltaY) { + delta = e.deltaY > 0 ? 1 : -1; + } else if (e.wheelDelta) { + delta = -e.wheelDelta / 120; + } else if (e.detail) { + delta = e.detail > 0 ? 
1 : -1; + } + + this.zoom(-delta * ratio, e); + }, + cropStart: function cropStart(e) { + if (this.disabled) { + return; + } + + var options = this.options, + pointers = this.pointers; + + var action = void 0; + + if (e.changedTouches) { + // Handle touch event + forEach(e.changedTouches, function (touch) { + pointers[touch.identifier] = getPointer(touch); + }); + } else { + // Handle mouse event and pointer event + pointers[e.pointerId || 0] = getPointer(e); + } + + if (Object.keys(pointers).length > 1 && options.zoomable && options.zoomOnTouch) { + action = ACTION_ZOOM; + } else { + action = getData(e.target, DATA_ACTION); + } + + if (!REGEXP_ACTIONS.test(action)) { + return; + } + + if (dispatchEvent(this.element, EVENT_CROP_START, { + originalEvent: e, + action: action + }) === false) { + return; + } + + e.preventDefault(); + + this.action = action; + this.cropping = false; + + if (action === ACTION_CROP) { + this.cropping = true; + addClass(this.dragBox, CLASS_MODAL); + } + }, + cropMove: function cropMove(e) { + var action = this.action; + + + if (this.disabled || !action) { + return; + } + + var pointers = this.pointers; + + + e.preventDefault(); + + if (dispatchEvent(this.element, EVENT_CROP_MOVE, { + originalEvent: e, + action: action + }) === false) { + return; + } + + if (e.changedTouches) { + forEach(e.changedTouches, function (touch) { + assign(pointers[touch.identifier], getPointer(touch, true)); + }); + } else { + assign(pointers[e.pointerId || 0], getPointer(e, true)); + } + + this.change(e); + }, + cropEnd: function cropEnd(e) { + if (this.disabled) { + return; + } + + var action = this.action, + pointers = this.pointers; + + + if (e.changedTouches) { + forEach(e.changedTouches, function (touch) { + delete pointers[touch.identifier]; + }); + } else { + delete pointers[e.pointerId || 0]; + } + + if (!action) { + return; + } + + e.preventDefault(); + + if (!Object.keys(pointers).length) { + this.action = ''; + } + + if (this.cropping) { + 
this.cropping = false; + toggleClass(this.dragBox, CLASS_MODAL, this.cropped && this.options.modal); + } + + dispatchEvent(this.element, EVENT_CROP_END, { + originalEvent: e, + action: action + }); + } +}; + +var change = { + change: function change(e) { + var options = this.options, + canvasData = this.canvasData, + containerData = this.containerData, + cropBoxData = this.cropBoxData, + pointers = this.pointers; + var action = this.action; + var aspectRatio = options.aspectRatio; + var left = cropBoxData.left, + top = cropBoxData.top, + width = cropBoxData.width, + height = cropBoxData.height; + + var right = left + width; + var bottom = top + height; + var minLeft = 0; + var minTop = 0; + var maxWidth = containerData.width; + var maxHeight = containerData.height; + var renderable = true; + var offset = void 0; + + // Locking aspect ratio in "free mode" by holding shift key + if (!aspectRatio && e.shiftKey) { + aspectRatio = width && height ? width / height : 1; + } + + if (this.limited) { + minLeft = cropBoxData.minLeft; + minTop = cropBoxData.minTop; + + maxWidth = minLeft + Math.min(containerData.width, canvasData.width, canvasData.left + canvasData.width); + maxHeight = minTop + Math.min(containerData.height, canvasData.height, canvasData.top + canvasData.height); + } + + var pointer = pointers[Object.keys(pointers)[0]]; + var range = { + x: pointer.endX - pointer.startX, + y: pointer.endY - pointer.startY + }; + var check = function check(side) { + switch (side) { + case ACTION_EAST: + if (right + range.x > maxWidth) { + range.x = maxWidth - right; + } + + break; + + case ACTION_WEST: + if (left + range.x < minLeft) { + range.x = minLeft - left; + } + + break; + + case ACTION_NORTH: + if (top + range.y < minTop) { + range.y = minTop - top; + } + + break; + + case ACTION_SOUTH: + if (bottom + range.y > maxHeight) { + range.y = maxHeight - bottom; + } + + break; + + default: + } + }; + + switch (action) { + // Move crop box + case ACTION_ALL: + left += range.x; 
+ top += range.y; + break; + + // Resize crop box + case ACTION_EAST: + if (range.x >= 0 && (right >= maxWidth || aspectRatio && (top <= minTop || bottom >= maxHeight))) { + renderable = false; + break; + } + + check(ACTION_EAST); + width += range.x; + + if (aspectRatio) { + height = width / aspectRatio; + top -= range.x / aspectRatio / 2; + } + + if (width < 0) { + action = ACTION_WEST; + width = 0; + } + + break; + + case ACTION_NORTH: + if (range.y <= 0 && (top <= minTop || aspectRatio && (left <= minLeft || right >= maxWidth))) { + renderable = false; + break; + } + + check(ACTION_NORTH); + height -= range.y; + top += range.y; + + if (aspectRatio) { + width = height * aspectRatio; + left += range.y * aspectRatio / 2; + } + + if (height < 0) { + action = ACTION_SOUTH; + height = 0; + } + + break; + + case ACTION_WEST: + if (range.x <= 0 && (left <= minLeft || aspectRatio && (top <= minTop || bottom >= maxHeight))) { + renderable = false; + break; + } + + check(ACTION_WEST); + width -= range.x; + left += range.x; + + if (aspectRatio) { + height = width / aspectRatio; + top += range.x / aspectRatio / 2; + } + + if (width < 0) { + action = ACTION_EAST; + width = 0; + } + + break; + + case ACTION_SOUTH: + if (range.y >= 0 && (bottom >= maxHeight || aspectRatio && (left <= minLeft || right >= maxWidth))) { + renderable = false; + break; + } + + check(ACTION_SOUTH); + height += range.y; + + if (aspectRatio) { + width = height * aspectRatio; + left -= range.y * aspectRatio / 2; + } + + if (height < 0) { + action = ACTION_NORTH; + height = 0; + } + + break; + + case ACTION_NORTH_EAST: + if (aspectRatio) { + if (range.y <= 0 && (top <= minTop || right >= maxWidth)) { + renderable = false; + break; + } + + check(ACTION_NORTH); + height -= range.y; + top += range.y; + width = height * aspectRatio; + } else { + check(ACTION_NORTH); + check(ACTION_EAST); + + if (range.x >= 0) { + if (right < maxWidth) { + width += range.x; + } else if (range.y <= 0 && top <= minTop) { + 
renderable = false; + } + } else { + width += range.x; + } + + if (range.y <= 0) { + if (top > minTop) { + height -= range.y; + top += range.y; + } + } else { + height -= range.y; + top += range.y; + } + } + + if (width < 0 && height < 0) { + action = ACTION_SOUTH_WEST; + height = 0; + width = 0; + } else if (width < 0) { + action = ACTION_NORTH_WEST; + width = 0; + } else if (height < 0) { + action = ACTION_SOUTH_EAST; + height = 0; + } + + break; + + case ACTION_NORTH_WEST: + if (aspectRatio) { + if (range.y <= 0 && (top <= minTop || left <= minLeft)) { + renderable = false; + break; + } + + check(ACTION_NORTH); + height -= range.y; + top += range.y; + width = height * aspectRatio; + left += range.y * aspectRatio; + } else { + check(ACTION_NORTH); + check(ACTION_WEST); + + if (range.x <= 0) { + if (left > minLeft) { + width -= range.x; + left += range.x; + } else if (range.y <= 0 && top <= minTop) { + renderable = false; + } + } else { + width -= range.x; + left += range.x; + } + + if (range.y <= 0) { + if (top > minTop) { + height -= range.y; + top += range.y; + } + } else { + height -= range.y; + top += range.y; + } + } + + if (width < 0 && height < 0) { + action = ACTION_SOUTH_EAST; + height = 0; + width = 0; + } else if (width < 0) { + action = ACTION_NORTH_EAST; + width = 0; + } else if (height < 0) { + action = ACTION_SOUTH_WEST; + height = 0; + } + + break; + + case ACTION_SOUTH_WEST: + if (aspectRatio) { + if (range.x <= 0 && (left <= minLeft || bottom >= maxHeight)) { + renderable = false; + break; + } + + check(ACTION_WEST); + width -= range.x; + left += range.x; + height = width / aspectRatio; + } else { + check(ACTION_SOUTH); + check(ACTION_WEST); + + if (range.x <= 0) { + if (left > minLeft) { + width -= range.x; + left += range.x; + } else if (range.y >= 0 && bottom >= maxHeight) { + renderable = false; + } + } else { + width -= range.x; + left += range.x; + } + + if (range.y >= 0) { + if (bottom < maxHeight) { + height += range.y; + } + } else { + 
height += range.y; + } + } + + if (width < 0 && height < 0) { + action = ACTION_NORTH_EAST; + height = 0; + width = 0; + } else if (width < 0) { + action = ACTION_SOUTH_EAST; + width = 0; + } else if (height < 0) { + action = ACTION_NORTH_WEST; + height = 0; + } + + break; + + case ACTION_SOUTH_EAST: + if (aspectRatio) { + if (range.x >= 0 && (right >= maxWidth || bottom >= maxHeight)) { + renderable = false; + break; + } + + check(ACTION_EAST); + width += range.x; + height = width / aspectRatio; + } else { + check(ACTION_SOUTH); + check(ACTION_EAST); + + if (range.x >= 0) { + if (right < maxWidth) { + width += range.x; + } else if (range.y >= 0 && bottom >= maxHeight) { + renderable = false; + } + } else { + width += range.x; + } + + if (range.y >= 0) { + if (bottom < maxHeight) { + height += range.y; + } + } else { + height += range.y; + } + } + + if (width < 0 && height < 0) { + action = ACTION_NORTH_WEST; + height = 0; + width = 0; + } else if (width < 0) { + action = ACTION_SOUTH_WEST; + width = 0; + } else if (height < 0) { + action = ACTION_NORTH_EAST; + height = 0; + } + + break; + + // Move canvas + case ACTION_MOVE: + this.move(range.x, range.y); + renderable = false; + break; + + // Zoom canvas + case ACTION_ZOOM: + this.zoom(getMaxZoomRatio(pointers), e); + renderable = false; + break; + + // Create crop box + case ACTION_CROP: + if (!range.x || !range.y) { + renderable = false; + break; + } + + offset = getOffset(this.cropper); + left = pointer.startX - offset.left; + top = pointer.startY - offset.top; + width = cropBoxData.minWidth; + height = cropBoxData.minHeight; + + if (range.x > 0) { + action = range.y > 0 ? ACTION_SOUTH_EAST : ACTION_NORTH_EAST; + } else if (range.x < 0) { + left -= width; + action = range.y > 0 ? 
ACTION_SOUTH_WEST : ACTION_NORTH_WEST; + } + + if (range.y < 0) { + top -= height; + } + + // Show the crop box if is hidden + if (!this.cropped) { + removeClass(this.cropBox, CLASS_HIDDEN); + this.cropped = true; + + if (this.limited) { + this.limitCropBox(true, true); + } + } + + break; + + default: + } + + if (renderable) { + cropBoxData.width = width; + cropBoxData.height = height; + cropBoxData.left = left; + cropBoxData.top = top; + this.action = action; + this.renderCropBox(); + } + + // Override + forEach(pointers, function (p) { + p.startX = p.endX; + p.startY = p.endY; + }); + } +}; + +var methods = { + // Show the crop box manually + crop: function crop() { + if (this.ready && !this.cropped && !this.disabled) { + this.cropped = true; + this.limitCropBox(true, true); + + if (this.options.modal) { + addClass(this.dragBox, CLASS_MODAL); + } + + removeClass(this.cropBox, CLASS_HIDDEN); + this.setCropBoxData(this.initialCropBoxData); + } + + return this; + }, + + + // Reset the image and crop box to their initial states + reset: function reset() { + if (this.ready && !this.disabled) { + this.imageData = assign({}, this.initialImageData); + this.canvasData = assign({}, this.initialCanvasData); + this.cropBoxData = assign({}, this.initialCropBoxData); + this.renderCanvas(); + + if (this.cropped) { + this.renderCropBox(); + } + } + + return this; + }, + + + // Clear the crop box + clear: function clear() { + if (this.cropped && !this.disabled) { + assign(this.cropBoxData, { + left: 0, + top: 0, + width: 0, + height: 0 + }); + + this.cropped = false; + this.renderCropBox(); + this.limitCanvas(true, true); + + // Render canvas after crop box rendered + this.renderCanvas(); + removeClass(this.dragBox, CLASS_MODAL); + addClass(this.cropBox, CLASS_HIDDEN); + } + + return this; + }, + + + /** + * Replace the image's src and rebuild the cropper + * @param {string} url - The new URL. 
+ * @param {boolean} [hasSameSize] - Indicate if the new image has the same size as the old one. + * @returns {Cropper} this + */ + replace: function replace(url) { + var hasSameSize = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false; + + if (!this.disabled && url) { + if (this.isImg) { + this.element.src = url; + } + + if (hasSameSize) { + this.url = url; + this.image.src = url; + + if (this.ready) { + this.viewBoxImage.src = url; + + forEach(this.previews, function (element) { + element.getElementsByTagName('img')[0].src = url; + }); + } + } else { + if (this.isImg) { + this.replaced = true; + } + + this.options.data = null; + this.uncreate(); + this.load(url); + } + } + + return this; + }, + + + // Enable (unfreeze) the cropper + enable: function enable() { + if (this.ready && this.disabled) { + this.disabled = false; + removeClass(this.cropper, CLASS_DISABLED); + } + + return this; + }, + + + // Disable (freeze) the cropper + disable: function disable() { + if (this.ready && !this.disabled) { + this.disabled = true; + addClass(this.cropper, CLASS_DISABLED); + } + + return this; + }, + + + /** + * Destroy the cropper and remove the instance from the image + * @returns {Cropper} this + */ + destroy: function destroy() { + var element = this.element; + + + if (!getData(element, NAMESPACE)) { + return this; + } + + if (this.isImg && this.replaced) { + element.src = this.originalUrl; + } + + this.uncreate(); + removeData(element, NAMESPACE); + + return this; + }, + + + /** + * Move the canvas with relative offsets + * @param {number} offsetX - The relative offset distance on the x-axis. + * @param {number} [offsetY=offsetX] - The relative offset distance on the y-axis. + * @returns {Cropper} this + */ + move: function move(offsetX) { + var offsetY = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : offsetX; + var _canvasData = this.canvasData, + left = _canvasData.left, + top = _canvasData.top; + + + return this.moveTo(isUndefined(offsetX) ? offsetX : left + Number(offsetX), isUndefined(offsetY) ? offsetY : top + Number(offsetY)); + }, + + + /** + * Move the canvas to an absolute point + * @param {number} x - The x-axis coordinate. + * @param {number} [y=x] - The y-axis coordinate. + * @returns {Cropper} this + */ + moveTo: function moveTo(x) { + var y = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : x; + var canvasData = this.canvasData; + + var changed = false; + + x = Number(x); + y = Number(y); + + if (this.ready && !this.disabled && this.options.movable) { + if (isNumber(x)) { + canvasData.left = x; + changed = true; + } + + if (isNumber(y)) { + canvasData.top = y; + changed = true; + } + + if (changed) { + this.renderCanvas(true); + } + } + + return this; + }, + + + /** + * Zoom the canvas with a relative ratio + * @param {number} ratio - The target ratio. + * @param {Event} _originalEvent - The original event if any. + * @returns {Cropper} this + */ + zoom: function zoom(ratio, _originalEvent) { + var canvasData = this.canvasData; + + + ratio = Number(ratio); + + if (ratio < 0) { + ratio = 1 / (1 - ratio); + } else { + ratio = 1 + ratio; + } + + return this.zoomTo(canvasData.width * ratio / canvasData.naturalWidth, null, _originalEvent); + }, + + + /** + * Zoom the canvas to an absolute ratio + * @param {number} ratio - The target ratio. + * @param {Object} pivot - The zoom pivot point coordinate. + * @param {Event} _originalEvent - The original event if any. 
+ * @returns {Cropper} this + */ + zoomTo: function zoomTo(ratio, pivot, _originalEvent) { + var options = this.options, + canvasData = this.canvasData; + var width = canvasData.width, + height = canvasData.height, + naturalWidth = canvasData.naturalWidth, + naturalHeight = canvasData.naturalHeight; + + + ratio = Number(ratio); + + if (ratio >= 0 && this.ready && !this.disabled && options.zoomable) { + var newWidth = naturalWidth * ratio; + var newHeight = naturalHeight * ratio; + + if (dispatchEvent(this.element, EVENT_ZOOM, { + originalEvent: _originalEvent, + oldRatio: width / naturalWidth, + ratio: newWidth / naturalWidth + }) === false) { + return this; + } + + if (_originalEvent) { + var pointers = this.pointers; + + var offset = getOffset(this.cropper); + var center = pointers && Object.keys(pointers).length ? getPointersCenter(pointers) : { + pageX: _originalEvent.pageX, + pageY: _originalEvent.pageY + }; + + // Zoom from the triggering point of the event + canvasData.left -= (newWidth - width) * ((center.pageX - offset.left - canvasData.left) / width); + canvasData.top -= (newHeight - height) * ((center.pageY - offset.top - canvasData.top) / height); + } else if (isPlainObject(pivot) && isNumber(pivot.x) && isNumber(pivot.y)) { + canvasData.left -= (newWidth - width) * ((pivot.x - canvasData.left) / width); + canvasData.top -= (newHeight - height) * ((pivot.y - canvasData.top) / height); + } else { + // Zoom from the center of the canvas + canvasData.left -= (newWidth - width) / 2; + canvasData.top -= (newHeight - height) / 2; + } + + canvasData.width = newWidth; + canvasData.height = newHeight; + this.renderCanvas(true); + } + + return this; + }, + + + /** + * Rotate the canvas with a relative degree + * @param {number} degree - The rotate degree. 
+ * @returns {Cropper} this + */ + rotate: function rotate(degree) { + return this.rotateTo((this.imageData.rotate || 0) + Number(degree)); + }, + + + /** + * Rotate the canvas to an absolute degree + * @param {number} degree - The rotate degree. + * @returns {Cropper} this + */ + rotateTo: function rotateTo(degree) { + degree = Number(degree); + + if (isNumber(degree) && this.ready && !this.disabled && this.options.rotatable) { + this.imageData.rotate = degree % 360; + this.renderCanvas(true, true); + } + + return this; + }, + + + /** + * Scale the image on the x-axis. + * @param {number} scaleX - The scale ratio on the x-axis. + * @returns {Cropper} this + */ + scaleX: function scaleX(_scaleX) { + var scaleY = this.imageData.scaleY; + + + return this.scale(_scaleX, isNumber(scaleY) ? scaleY : 1); + }, + + + /** + * Scale the image on the y-axis. + * @param {number} scaleY - The scale ratio on the y-axis. + * @returns {Cropper} this + */ + scaleY: function scaleY(_scaleY) { + var scaleX = this.imageData.scaleX; + + + return this.scale(isNumber(scaleX) ? scaleX : 1, _scaleY); + }, + + + /** + * Scale the image + * @param {number} scaleX - The scale ratio on the x-axis. + * @param {number} [scaleY=scaleX] - The scale ratio on the y-axis. + * @returns {Cropper} this + */ + scale: function scale(scaleX) { + var scaleY = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : scaleX; + var imageData = this.imageData; + + var transformed = false; + + scaleX = Number(scaleX); + scaleY = Number(scaleY); + + if (this.ready && !this.disabled && this.options.scalable) { + if (isNumber(scaleX)) { + imageData.scaleX = scaleX; + transformed = true; + } + + if (isNumber(scaleY)) { + imageData.scaleY = scaleY; + transformed = true; + } + + if (transformed) { + this.renderCanvas(true, true); + } + } + + return this; + }, + + + /** + * Get the cropped area position and size data (base on the original image) + * @param {boolean} [rounded=false] - Indicate if round the data values or not. + * @returns {Object} The result cropped data. + */ + getData: function getData$$1() { + var rounded = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false; + var options = this.options, + imageData = this.imageData, + canvasData = this.canvasData, + cropBoxData = this.cropBoxData; + + var data = void 0; + + if (this.ready && this.cropped) { + data = { + x: cropBoxData.left - canvasData.left, + y: cropBoxData.top - canvasData.top, + width: cropBoxData.width, + height: cropBoxData.height + }; + + var ratio = imageData.width / imageData.naturalWidth; + + forEach(data, function (n, i) { + n /= ratio; + data[i] = rounded ? Math.round(n) : n; + }); + } else { + data = { + x: 0, + y: 0, + width: 0, + height: 0 + }; + } + + if (options.rotatable) { + data.rotate = imageData.rotate || 0; + } + + if (options.scalable) { + data.scaleX = imageData.scaleX || 1; + data.scaleY = imageData.scaleY || 1; + } + + return data; + }, + + + /** + * Set the cropped area position and size with new data + * @param {Object} data - The new data. 
+ * @returns {Cropper} this + */ + setData: function setData$$1(data) { + var options = this.options, + imageData = this.imageData, + canvasData = this.canvasData; + + var cropBoxData = {}; + + if (this.ready && !this.disabled && isPlainObject(data)) { + var transformed = false; + + if (options.rotatable) { + if (isNumber(data.rotate) && data.rotate !== imageData.rotate) { + imageData.rotate = data.rotate; + transformed = true; + } + } + + if (options.scalable) { + if (isNumber(data.scaleX) && data.scaleX !== imageData.scaleX) { + imageData.scaleX = data.scaleX; + transformed = true; + } + + if (isNumber(data.scaleY) && data.scaleY !== imageData.scaleY) { + imageData.scaleY = data.scaleY; + transformed = true; + } + } + + if (transformed) { + this.renderCanvas(true, true); + } + + var ratio = imageData.width / imageData.naturalWidth; + + if (isNumber(data.x)) { + cropBoxData.left = data.x * ratio + canvasData.left; + } + + if (isNumber(data.y)) { + cropBoxData.top = data.y * ratio + canvasData.top; + } + + if (isNumber(data.width)) { + cropBoxData.width = data.width * ratio; + } + + if (isNumber(data.height)) { + cropBoxData.height = data.height * ratio; + } + + this.setCropBoxData(cropBoxData); + } + + return this; + }, + + + /** + * Get the container size data. + * @returns {Object} The result container data. + */ + getContainerData: function getContainerData() { + return this.ready ? assign({}, this.containerData) : {}; + }, + + + /** + * Get the image position and size data. + * @returns {Object} The result image data. + */ + getImageData: function getImageData() { + return this.sized ? assign({}, this.imageData) : {}; + }, + + + /** + * Get the canvas position and size data. + * @returns {Object} The result canvas data. 
+ */ + getCanvasData: function getCanvasData() { + var canvasData = this.canvasData; + + var data = {}; + + if (this.ready) { + forEach(['left', 'top', 'width', 'height', 'naturalWidth', 'naturalHeight'], function (n) { + data[n] = canvasData[n]; + }); + } + + return data; + }, + + + /** + * Set the canvas position and size with new data. + * @param {Object} data - The new canvas data. + * @returns {Cropper} this + */ + setCanvasData: function setCanvasData(data) { + var canvasData = this.canvasData; + var aspectRatio = canvasData.aspectRatio; + + + if (this.ready && !this.disabled && isPlainObject(data)) { + if (isNumber(data.left)) { + canvasData.left = data.left; + } + + if (isNumber(data.top)) { + canvasData.top = data.top; + } + + if (isNumber(data.width)) { + canvasData.width = data.width; + canvasData.height = data.width / aspectRatio; + } else if (isNumber(data.height)) { + canvasData.height = data.height; + canvasData.width = data.height * aspectRatio; + } + + this.renderCanvas(true); + } + + return this; + }, + + + /** + * Get the crop box position and size data. + * @returns {Object} The result crop box data. + */ + getCropBoxData: function getCropBoxData() { + var cropBoxData = this.cropBoxData; + + var data = void 0; + + if (this.ready && this.cropped) { + data = { + left: cropBoxData.left, + top: cropBoxData.top, + width: cropBoxData.width, + height: cropBoxData.height + }; + } + + return data || {}; + }, + + + /** + * Set the crop box position and size with new data. + * @param {Object} data - The new crop box data. 
+ * @returns {Cropper} this + */ + setCropBoxData: function setCropBoxData(data) { + var cropBoxData = this.cropBoxData; + var aspectRatio = this.options.aspectRatio; + + var widthChanged = void 0; + var heightChanged = void 0; + + if (this.ready && this.cropped && !this.disabled && isPlainObject(data)) { + if (isNumber(data.left)) { + cropBoxData.left = data.left; + } + + if (isNumber(data.top)) { + cropBoxData.top = data.top; + } + + if (isNumber(data.width) && data.width !== cropBoxData.width) { + widthChanged = true; + cropBoxData.width = data.width; + } + + if (isNumber(data.height) && data.height !== cropBoxData.height) { + heightChanged = true; + cropBoxData.height = data.height; + } + + if (aspectRatio) { + if (widthChanged) { + cropBoxData.height = cropBoxData.width / aspectRatio; + } else if (heightChanged) { + cropBoxData.width = cropBoxData.height * aspectRatio; + } + } + + this.renderCropBox(); + } + + return this; + }, + + + /** + * Get a canvas drawn the cropped image. + * @param {Object} [options={}] - The config options. + * @returns {HTMLCanvasElement} - The result canvas. + */ + getCroppedCanvas: function getCroppedCanvas() { + var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; + + if (!this.ready || !window.HTMLCanvasElement) { + return null; + } + + var canvasData = this.canvasData; + + var source = getSourceCanvas(this.image, this.imageData, canvasData, options); + + // Returns the source canvas if it is not cropped. 
+ if (!this.cropped) { + return source; + } + + var _getData = this.getData(), + initialX = _getData.x, + initialY = _getData.y, + initialWidth = _getData.width, + initialHeight = _getData.height; + + var ratio = source.width / Math.floor(canvasData.naturalWidth); + + if (ratio !== 1) { + initialX *= ratio; + initialY *= ratio; + initialWidth *= ratio; + initialHeight *= ratio; + } + + var aspectRatio = initialWidth / initialHeight; + var maxSizes = getAdjustedSizes({ + aspectRatio: aspectRatio, + width: options.maxWidth || Infinity, + height: options.maxHeight || Infinity + }); + var minSizes = getAdjustedSizes({ + aspectRatio: aspectRatio, + width: options.minWidth || 0, + height: options.minHeight || 0 + }, 'cover'); + + var _getAdjustedSizes = getAdjustedSizes({ + aspectRatio: aspectRatio, + width: options.width || (ratio !== 1 ? source.width : initialWidth), + height: options.height || (ratio !== 1 ? source.height : initialHeight) + }), + width = _getAdjustedSizes.width, + height = _getAdjustedSizes.height; + + width = Math.min(maxSizes.width, Math.max(minSizes.width, width)); + height = Math.min(maxSizes.height, Math.max(minSizes.height, height)); + + var canvas = document.createElement('canvas'); + var context = canvas.getContext('2d'); + + canvas.width = normalizeDecimalNumber(width); + canvas.height = normalizeDecimalNumber(height); + + context.fillStyle = options.fillColor || 'transparent'; + context.fillRect(0, 0, width, height); + + var _options$imageSmoothi = options.imageSmoothingEnabled, + imageSmoothingEnabled = _options$imageSmoothi === undefined ? 
true : _options$imageSmoothi, + imageSmoothingQuality = options.imageSmoothingQuality; + + + context.imageSmoothingEnabled = imageSmoothingEnabled; + + if (imageSmoothingQuality) { + context.imageSmoothingQuality = imageSmoothingQuality; + } + + // https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D.drawImage + var sourceWidth = source.width; + var sourceHeight = source.height; + + // Source canvas parameters + var srcX = initialX; + var srcY = initialY; + var srcWidth = void 0; + var srcHeight = void 0; + + // Destination canvas parameters + var dstX = void 0; + var dstY = void 0; + var dstWidth = void 0; + var dstHeight = void 0; + + if (srcX <= -initialWidth || srcX > sourceWidth) { + srcX = 0; + srcWidth = 0; + dstX = 0; + dstWidth = 0; + } else if (srcX <= 0) { + dstX = -srcX; + srcX = 0; + srcWidth = Math.min(sourceWidth, initialWidth + srcX); + dstWidth = srcWidth; + } else if (srcX <= sourceWidth) { + dstX = 0; + srcWidth = Math.min(initialWidth, sourceWidth - srcX); + dstWidth = srcWidth; + } + + if (srcWidth <= 0 || srcY <= -initialHeight || srcY > sourceHeight) { + srcY = 0; + srcHeight = 0; + dstY = 0; + dstHeight = 0; + } else if (srcY <= 0) { + dstY = -srcY; + srcY = 0; + srcHeight = Math.min(sourceHeight, initialHeight + srcY); + dstHeight = srcHeight; + } else if (srcY <= sourceHeight) { + dstY = 0; + srcHeight = Math.min(initialHeight, sourceHeight - srcY); + dstHeight = srcHeight; + } + + var params = [srcX, srcY, srcWidth, srcHeight]; + + // Avoid "IndexSizeError" + if (dstWidth > 0 && dstHeight > 0) { + var scale = width / initialWidth; + + params.push(dstX * scale, dstY * scale, dstWidth * scale, dstHeight * scale); + } + + // All the numerical parameters should be integer for `drawImage` + // https://github.com/fengyuanchen/cropper/issues/476 + context.drawImage.apply(context, [source].concat(toConsumableArray(params.map(function (param) { + return Math.floor(normalizeDecimalNumber(param)); + })))); + + return canvas; + 
}, + + + /** + * Change the aspect ratio of the crop box. + * @param {number} aspectRatio - The new aspect ratio. + * @returns {Cropper} this + */ + setAspectRatio: function setAspectRatio(aspectRatio) { + var options = this.options; + + + if (!this.disabled && !isUndefined(aspectRatio)) { + // 0 -> NaN + options.aspectRatio = Math.max(0, aspectRatio) || NaN; + + if (this.ready) { + this.initCropBox(); + + if (this.cropped) { + this.renderCropBox(); + } + } + } + + return this; + }, + + + /** + * Change the drag mode. + * @param {string} mode - The new drag mode. + * @returns {Cropper} this + */ + setDragMode: function setDragMode(mode) { + var options = this.options, + dragBox = this.dragBox, + face = this.face; + + + if (this.ready && !this.disabled) { + var croppable = mode === DRAG_MODE_CROP; + var movable = options.movable && mode === DRAG_MODE_MOVE; + + mode = croppable || movable ? mode : DRAG_MODE_NONE; + + options.dragMode = mode; + setData(dragBox, DATA_ACTION, mode); + toggleClass(dragBox, CLASS_CROP, croppable); + toggleClass(dragBox, CLASS_MOVE, movable); + + if (!options.cropBoxMovable) { + // Sync drag mode to crop box when it is not movable + setData(face, DATA_ACTION, mode); + toggleClass(face, CLASS_CROP, croppable); + toggleClass(face, CLASS_MOVE, movable); + } + } + + return this; + } +}; + +var AnotherCropper = WINDOW.Cropper; + +var Cropper = function () { + /** + * Create a new Cropper. + * @param {Element} element - The target element for cropping. + * @param {Object} [options={}] - The configuration options. + */ + function Cropper(element) { + var options = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + classCallCheck(this, Cropper); + + if (!element || !REGEXP_TAG_NAME.test(element.tagName)) { + throw new Error('The first argument is required and must be an or element.'); + } + + this.element = element; + this.options = assign({}, DEFAULTS, isPlainObject(options) && options); + this.cropped = false; + this.disabled = false; + this.pointers = {}; + this.ready = false; + this.reloading = false; + this.replaced = false; + this.sized = false; + this.sizing = false; + this.init(); + } + + createClass(Cropper, [{ + key: 'init', + value: function init() { + var element = this.element; + + var tagName = element.tagName.toLowerCase(); + var url = void 0; + + if (getData(element, NAMESPACE)) { + return; + } + + setData(element, NAMESPACE, this); + + if (tagName === 'img') { + this.isImg = true; + + // e.g.: "img/picture.jpg" + url = element.getAttribute('src') || ''; + this.originalUrl = url; + + // Stop when it's a blank image + if (!url) { + return; + } + + // e.g.: "http://example.com/img/picture.jpg" + url = element.src; + } else if (tagName === 'canvas' && window.HTMLCanvasElement) { + url = element.toDataURL(); + } + + this.load(url); + } + }, { + key: 'load', + value: function load(url) { + var _this = this; + + if (!url) { + return; + } + + this.url = url; + this.imageData = {}; + + var element = this.element, + options = this.options; + + + if (!options.checkOrientation || !window.ArrayBuffer) { + this.clone(); + return; + } + + // XMLHttpRequest disallows to open a Data URL in some browsers like IE11 and Safari + if (REGEXP_DATA_URL.test(url)) { + if (REGEXP_DATA_URL_JPEG.test(url)) { + this.read(dataURLToArrayBuffer(url)); + } else { + this.clone(); + } + + return; + } + + var xhr = new XMLHttpRequest(); + + this.reloading = true; + this.xhr = xhr; + + var done = function done() { + _this.reloading = false; + _this.xhr = null; + }; + + xhr.ontimeout = done; + xhr.onabort = done; + xhr.onerror = function () { + done(); + _this.clone(); + }; + 
+ xhr.onload = function () { + done(); + _this.read(xhr.response); + }; + + // Bust cache when there is a "crossOrigin" property + if (options.checkCrossOrigin && isCrossOriginURL(url) && element.crossOrigin) { + url = addTimestamp(url); + } + + xhr.open('get', url); + xhr.responseType = 'arraybuffer'; + xhr.withCredentials = element.crossOrigin === 'use-credentials'; + xhr.send(); + } + }, { + key: 'read', + value: function read(arrayBuffer) { + var options = this.options, + imageData = this.imageData; + + var orientation = getOrientation(arrayBuffer); + var rotate = 0; + var scaleX = 1; + var scaleY = 1; + + if (orientation > 1) { + this.url = arrayBufferToDataURL(arrayBuffer, 'image/jpeg'); + + var _parseOrientation = parseOrientation(orientation); + + rotate = _parseOrientation.rotate; + scaleX = _parseOrientation.scaleX; + scaleY = _parseOrientation.scaleY; + } + + if (options.rotatable) { + imageData.rotate = rotate; + } + + if (options.scalable) { + imageData.scaleX = scaleX; + imageData.scaleY = scaleY; + } + + this.clone(); + } + }, { + key: 'clone', + value: function clone() { + var element = this.element, + url = this.url; + + var crossOrigin = void 0; + var crossOriginUrl = void 0; + + if (this.options.checkCrossOrigin && isCrossOriginURL(url)) { + crossOrigin = element.crossOrigin; + + + if (crossOrigin) { + crossOriginUrl = url; + } else { + crossOrigin = 'anonymous'; + + // Bust cache when there is not a "crossOrigin" property + crossOriginUrl = addTimestamp(url); + } + } + + this.crossOrigin = crossOrigin; + this.crossOriginUrl = crossOriginUrl; + + var image = document.createElement('img'); + + if (crossOrigin) { + image.crossOrigin = crossOrigin; + } + + image.src = crossOriginUrl || url; + + var start = this.start.bind(this); + var stop = this.stop.bind(this); + + this.image = image; + this.onStart = start; + this.onStop = stop; + + if (this.isImg) { + if (element.complete) { + // start asynchronously to keep `this.cropper` is accessible in 
`ready` event handler. + this.timeout = setTimeout(start, 0); + } else { + addListener(element, EVENT_LOAD, start, { + once: true + }); + } + } else { + image.onload = start; + image.onerror = stop; + addClass(image, CLASS_HIDE); + element.parentNode.insertBefore(image, element.nextSibling); + } + } + }, { + key: 'start', + value: function start(event) { + var _this2 = this; + + var image = this.isImg ? this.element : this.image; + + if (event) { + image.onload = null; + image.onerror = null; + } + + this.sizing = true; + + var IS_SAFARI = WINDOW.navigator && /(Macintosh|iPhone|iPod|iPad).*AppleWebKit/i.test(WINDOW.navigator.userAgent); + var done = function done(naturalWidth, naturalHeight) { + assign(_this2.imageData, { + naturalWidth: naturalWidth, + naturalHeight: naturalHeight, + aspectRatio: naturalWidth / naturalHeight + }); + _this2.sizing = false; + _this2.sized = true; + _this2.build(); + }; + + // Modern browsers (except Safari) + if (image.naturalWidth && !IS_SAFARI) { + done(image.naturalWidth, image.naturalHeight); + return; + } + + var sizingImage = document.createElement('img'); + var body = document.body || document.documentElement; + + this.sizingImage = sizingImage; + + sizingImage.onload = function () { + done(sizingImage.width, sizingImage.height); + + if (!IS_SAFARI) { + body.removeChild(sizingImage); + } + }; + + sizingImage.src = image.src; + + // iOS Safari will convert the image automatically + // with its orientation once append it into DOM (#279) + if (!IS_SAFARI) { + sizingImage.style.cssText = 'left:0;' + 'max-height:none!important;' + 'max-width:none!important;' + 'min-height:0!important;' + 'min-width:0!important;' + 'opacity:0;' + 'position:absolute;' + 'top:0;' + 'z-index:-1;'; + body.appendChild(sizingImage); + } + } + }, { + key: 'stop', + value: function stop() { + var image = this.image; + + + image.onload = null; + image.onerror = null; + image.parentNode.removeChild(image); + this.image = null; + } + }, { + key: 'build', + 
value: function build() { + if (!this.sized || this.ready) { + return; + } + + var element = this.element, + options = this.options, + image = this.image; + + // Create cropper elements + + var container = element.parentNode; + var template = document.createElement('div'); + + template.innerHTML = TEMPLATE; + + var cropper = template.querySelector('.' + NAMESPACE + '-container'); + var canvas = cropper.querySelector('.' + NAMESPACE + '-canvas'); + var dragBox = cropper.querySelector('.' + NAMESPACE + '-drag-box'); + var cropBox = cropper.querySelector('.' + NAMESPACE + '-crop-box'); + var face = cropBox.querySelector('.' + NAMESPACE + '-face'); + + this.container = container; + this.cropper = cropper; + this.canvas = canvas; + this.dragBox = dragBox; + this.cropBox = cropBox; + this.viewBox = cropper.querySelector('.' + NAMESPACE + '-view-box'); + this.face = face; + + canvas.appendChild(image); + + // Hide the original image + addClass(element, CLASS_HIDDEN); + + // Inserts the cropper after to the current image + container.insertBefore(cropper, element.nextSibling); + + // Show the image if is hidden + if (!this.isImg) { + removeClass(image, CLASS_HIDE); + } + + this.initPreview(); + this.bind(); + + options.aspectRatio = Math.max(0, options.aspectRatio) || NaN; + options.viewMode = Math.max(0, Math.min(3, Math.round(options.viewMode))) || 0; + + addClass(cropBox, CLASS_HIDDEN); + + if (!options.guides) { + addClass(cropBox.getElementsByClassName(NAMESPACE + '-dashed'), CLASS_HIDDEN); + } + + if (!options.center) { + addClass(cropBox.getElementsByClassName(NAMESPACE + '-center'), CLASS_HIDDEN); + } + + if (options.background) { + addClass(cropper, NAMESPACE + '-bg'); + } + + if (!options.highlight) { + addClass(face, CLASS_INVISIBLE); + } + + if (options.cropBoxMovable) { + addClass(face, CLASS_MOVE); + setData(face, DATA_ACTION, ACTION_ALL); + } + + if (!options.cropBoxResizable) { + addClass(cropBox.getElementsByClassName(NAMESPACE + '-line'), CLASS_HIDDEN); + 
addClass(cropBox.getElementsByClassName(NAMESPACE + '-point'), CLASS_HIDDEN); + } + + this.render(); + this.ready = true; + this.setDragMode(options.dragMode); + + if (options.autoCrop) { + this.crop(); + } + + this.setData(options.data); + + if (isFunction(options.ready)) { + addListener(element, EVENT_READY, options.ready, { + once: true + }); + } + + dispatchEvent(element, EVENT_READY); + } + }, { + key: 'unbuild', + value: function unbuild() { + if (!this.ready) { + return; + } + + this.ready = false; + this.unbind(); + this.resetPreview(); + this.cropper.parentNode.removeChild(this.cropper); + removeClass(this.element, CLASS_HIDDEN); + } + }, { + key: 'uncreate', + value: function uncreate() { + var element = this.element; + + + if (this.ready) { + this.unbuild(); + this.ready = false; + this.cropped = false; + } else if (this.sizing) { + this.sizingImage.onload = null; + this.sizing = false; + this.sized = false; + } else if (this.reloading) { + this.xhr.abort(); + } else if (this.isImg) { + if (element.complete) { + clearTimeout(this.timeout); + } else { + removeListener(element, EVENT_LOAD, this.onStart); + } + } else if (this.image) { + this.stop(); + } + } + + /** + * Get the no conflict cropper class. + * @returns {Cropper} The cropper class. + */ + + }], [{ + key: 'noConflict', + value: function noConflict() { + window.Cropper = AnotherCropper; + return Cropper; + } + + /** + * Change the default options. + * @param {Object} options - The new default options. + */ + + }, { + key: 'setDefaults', + value: function setDefaults(options) { + assign(DEFAULTS, isPlainObject(options) && options); + } + }]); + return Cropper; +}(); + +assign(Cropper.prototype, render, preview, events, handlers, change, methods); + +if ($.fn) { + var AnotherCropper$1 = $.fn.cropper; + var NAMESPACE$1 = 'cropper'; + + $.fn.cropper = function jQueryCropper(option) { + for (var _len = arguments.length, args = Array(_len > 1 ? 
_len - 1 : 0), _key = 1; _key < _len; _key++) { + args[_key - 1] = arguments[_key]; + } + + var result = void 0; + + this.each(function (i, element) { + var $element = $(element); + var isDestroy = option === 'destroy'; + var cropper = $element.data(NAMESPACE$1); + + if (!cropper) { + if (isDestroy) { + return; + } + + var options = $.extend({}, $element.data(), $.isPlainObject(option) && option); + + cropper = new Cropper(element, options); + $element.data(NAMESPACE$1, cropper); + } + + if (typeof option === 'string') { + var fn = cropper[option]; + + if ($.isFunction(fn)) { + result = fn.apply(cropper, args); + + if (result === cropper) { + result = undefined; + } + + if (isDestroy) { + $element.removeData(NAMESPACE$1); + } + } + } + }); + + return result !== undefined ? result : this; + }; + + $.fn.cropper.Constructor = Cropper; + $.fn.cropper.setDefaults = Cropper.setDefaults; + $.fn.cropper.noConflict = function noConflict() { + $.fn.cropper = AnotherCropper$1; + return this; + }; +} diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/cropper/cropper.css b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/cropper/cropper.css new file mode 100644 index 0000000000000000000000000000000000000000..b5b8b413a08eca14c02589801b6ffed83b53fbfc --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/cropper/cropper.css @@ -0,0 +1,305 @@ +/*! 
+ * Cropper v4.0.0 + * https://github.com/fengyuanchen/cropper + * + * Copyright (c) 2014-2018 Chen Fengyuan + * Released under the MIT license + * + * Date: 2018-04-01T06:26:32.417Z + */ + +.cropper-container { + direction: ltr; + font-size: 0; + line-height: 0; + position: relative; + -ms-touch-action: none; + touch-action: none; + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; +} + +.cropper-container img {/*Avoid margin top issue (Occur only when margin-top <= -height) + */ + display: block; + height: 100%; + image-orientation: 0deg; + max-height: none !important; + max-width: none !important; + min-height: 0 !important; + min-width: 0 !important; + width: 100%; +} + +.cropper-wrap-box, +.cropper-canvas, +.cropper-drag-box, +.cropper-crop-box, +.cropper-modal { + bottom: 0; + left: 0; + position: absolute; + right: 0; + top: 0; +} + +.cropper-wrap-box, +.cropper-canvas { + overflow: hidden; +} + +.cropper-drag-box { + background-color: #fff; + opacity: 0; +} + +.cropper-modal { + background-color: #000; + opacity: .5; +} + +.cropper-view-box { + display: block; + height: 100%; + outline-color: rgba(51, 153, 255, 0.75); + outline: 1px solid #39f; + overflow: hidden; + width: 100%; +} + +.cropper-dashed { + border: 0 dashed #eee; + display: block; + opacity: .5; + position: absolute; +} + +.cropper-dashed.dashed-h { + border-bottom-width: 1px; + border-top-width: 1px; + height: 33.33333%; + left: 0; + top: 33.33333%; + width: 100%; +} + +.cropper-dashed.dashed-v { + border-left-width: 1px; + border-right-width: 1px; + height: 100%; + left: 33.33333%; + top: 0; + width: 33.33333%; +} + +.cropper-center { + display: block; + height: 0; + left: 50%; + opacity: .75; + position: absolute; + top: 50%; + width: 0; +} + +.cropper-center:before, +.cropper-center:after { + background-color: #eee; + content: ' '; + display: block; + position: absolute; +} + +.cropper-center:before { + height: 1px; + left: -3px; + top: 0; + 
width: 7px; +} + +.cropper-center:after { + height: 7px; + left: 0; + top: -3px; + width: 1px; +} + +.cropper-face, +.cropper-line, +.cropper-point { + display: block; + height: 100%; + opacity: .1; + position: absolute; + width: 100%; +} + +.cropper-face { + background-color: #fff; + left: 0; + top: 0; +} + +.cropper-line { + background-color: #39f; +} + +.cropper-line.line-e { + cursor: ew-resize; + right: -3px; + top: 0; + width: 5px; +} + +.cropper-line.line-n { + cursor: ns-resize; + height: 5px; + left: 0; + top: -3px; +} + +.cropper-line.line-w { + cursor: ew-resize; + left: -3px; + top: 0; + width: 5px; +} + +.cropper-line.line-s { + bottom: -3px; + cursor: ns-resize; + height: 5px; + left: 0; +} + +.cropper-point { + background-color: #39f; + height: 5px; + opacity: .75; + width: 5px; +} + +.cropper-point.point-e { + cursor: ew-resize; + margin-top: -3px; + right: -3px; + top: 50%; +} + +.cropper-point.point-n { + cursor: ns-resize; + left: 50%; + margin-left: -3px; + top: -3px; +} + +.cropper-point.point-w { + cursor: ew-resize; + left: -3px; + margin-top: -3px; + top: 50%; +} + +.cropper-point.point-s { + bottom: -3px; + cursor: s-resize; + left: 50%; + margin-left: -3px; +} + +.cropper-point.point-ne { + cursor: nesw-resize; + right: -3px; + top: -3px; +} + +.cropper-point.point-nw { + cursor: nwse-resize; + left: -3px; + top: -3px; +} + +.cropper-point.point-sw { + bottom: -3px; + cursor: nesw-resize; + left: -3px; +} + +.cropper-point.point-se { + bottom: -3px; + cursor: nwse-resize; + height: 20px; + opacity: 1; + right: -3px; + width: 20px; +} + +@media (min-width: 768px) { + .cropper-point.point-se { + height: 15px; + width: 15px; + } +} + +@media (min-width: 992px) { + .cropper-point.point-se { + height: 10px; + width: 10px; + } +} + +@media (min-width: 1200px) { + .cropper-point.point-se { + height: 5px; + opacity: .75; + width: 5px; + } +} + +.cropper-point.point-se:before { + background-color: #39f; + bottom: -50%; + content: ' '; + display: 
block; + height: 200%; + opacity: 0; + position: absolute; + right: -50%; + width: 200%; +} + +.cropper-invisible { + opacity: 0; +} + +.cropper-bg { + background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQAQMAAAAlPW0iAAAAA3NCSVQICAjb4U/gAAAABlBMVEXMzMz////TjRV2AAAACXBIWXMAAArrAAAK6wGCiw1aAAAAHHRFWHRTb2Z0d2FyZQBBZG9iZSBGaXJld29ya3MgQ1M26LyyjAAAABFJREFUCJlj+M/AgBVhF/0PAH6/D/HkDxOGAAAAAElFTkSuQmCC'); +} + +.cropper-hide { + display: block; + height: 0; + position: absolute; + width: 0; +} + +.cropper-hidden { + display: none !important; +} + +.cropper-move { + cursor: move; +} + +.cropper-crop { + cursor: crosshair; +} + +.cropper-disabled .cropper-drag-box, +.cropper-disabled .cropper-face, +.cropper-disabled .cropper-line, +.cropper-disabled .cropper-point { + cursor: not-allowed; +} diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/cropper/cropper.esm.js b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/cropper/cropper.esm.js new file mode 100644 index 0000000000000000000000000000000000000000..758c2dfbf77ddc8cd76a12f09937f43725f5fbfc --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/cropper/cropper.esm.js @@ -0,0 +1,3753 @@ +/*! + * Cropper v4.0.0 + * https://github.com/fengyuanchen/cropper + * + * Copyright (c) 2014-2018 Chen Fengyuan + * Released under the MIT license + * + * Date: 2018-04-01T06:27:27.267Z + */ + +import $ from 'jquery'; + +var IN_BROWSER = typeof window !== 'undefined'; +var WINDOW = IN_BROWSER ? 
window : {}; +var NAMESPACE = 'cropper'; + +// Actions +var ACTION_ALL = 'all'; +var ACTION_CROP = 'crop'; +var ACTION_MOVE = 'move'; +var ACTION_ZOOM = 'zoom'; +var ACTION_EAST = 'e'; +var ACTION_WEST = 'w'; +var ACTION_SOUTH = 's'; +var ACTION_NORTH = 'n'; +var ACTION_NORTH_EAST = 'ne'; +var ACTION_NORTH_WEST = 'nw'; +var ACTION_SOUTH_EAST = 'se'; +var ACTION_SOUTH_WEST = 'sw'; + +// Classes +var CLASS_CROP = NAMESPACE + '-crop'; +var CLASS_DISABLED = NAMESPACE + '-disabled'; +var CLASS_HIDDEN = NAMESPACE + '-hidden'; +var CLASS_HIDE = NAMESPACE + '-hide'; +var CLASS_INVISIBLE = NAMESPACE + '-invisible'; +var CLASS_MODAL = NAMESPACE + '-modal'; +var CLASS_MOVE = NAMESPACE + '-move'; + +// Data keys +var DATA_ACTION = 'action'; +var DATA_PREVIEW = 'preview'; + +// Drag modes +var DRAG_MODE_CROP = 'crop'; +var DRAG_MODE_MOVE = 'move'; +var DRAG_MODE_NONE = 'none'; + +// Events +var EVENT_CROP = 'crop'; +var EVENT_CROP_END = 'cropend'; +var EVENT_CROP_MOVE = 'cropmove'; +var EVENT_CROP_START = 'cropstart'; +var EVENT_DBLCLICK = 'dblclick'; +var EVENT_LOAD = 'load'; +var EVENT_POINTER_DOWN = WINDOW.PointerEvent ? 'pointerdown' : 'touchstart mousedown'; +var EVENT_POINTER_MOVE = WINDOW.PointerEvent ? 'pointermove' : 'touchmove mousemove'; +var EVENT_POINTER_UP = WINDOW.PointerEvent ? 
'pointerup pointercancel' : 'touchend touchcancel mouseup'; +var EVENT_READY = 'ready'; +var EVENT_RESIZE = 'resize'; +var EVENT_WHEEL = 'wheel mousewheel DOMMouseScroll'; +var EVENT_ZOOM = 'zoom'; + +// RegExps +var REGEXP_ACTIONS = /^(?:e|w|s|n|se|sw|ne|nw|all|crop|move|zoom)$/; +var REGEXP_DATA_URL = /^data:/; +var REGEXP_DATA_URL_JPEG = /^data:image\/jpeg;base64,/; +var REGEXP_TAG_NAME = /^(?:img|canvas)$/i; + +var DEFAULTS = { + // Define the view mode of the cropper + viewMode: 0, // 0, 1, 2, 3 + + // Define the dragging mode of the cropper + dragMode: DRAG_MODE_CROP, // 'crop', 'move' or 'none' + + // Define the aspect ratio of the crop box + aspectRatio: NaN, + + // An object with the previous cropping result data + data: null, + + // A selector for adding extra containers to preview + preview: '', + + // Re-render the cropper when resize the window + responsive: true, + + // Restore the cropped area after resize the window + restore: true, + + // Check if the current image is a cross-origin image + checkCrossOrigin: true, + + // Check the current image's Exif Orientation information + checkOrientation: true, + + // Show the black modal + modal: true, + + // Show the dashed lines for guiding + guides: true, + + // Show the center indicator for guiding + center: true, + + // Show the white modal to highlight the crop box + highlight: true, + + // Show the grid background + background: true, + + // Enable to crop the image automatically when initialize + autoCrop: true, + + // Define the percentage of automatic cropping area when initializes + autoCropArea: 0.8, + + // Enable to move the image + movable: true, + + // Enable to rotate the image + rotatable: true, + + // Enable to scale the image + scalable: true, + + // Enable to zoom the image + zoomable: true, + + // Enable to zoom the image by dragging touch + zoomOnTouch: true, + + // Enable to zoom the image by wheeling mouse + zoomOnWheel: true, + + // Define zoom ratio when zoom the image by wheeling 
mouse + wheelZoomRatio: 0.1, + + // Enable to move the crop box + cropBoxMovable: true, + + // Enable to resize the crop box + cropBoxResizable: true, + + // Toggle drag mode between "crop" and "move" when click twice on the cropper + toggleDragModeOnDblclick: true, + + // Size limitation + minCanvasWidth: 0, + minCanvasHeight: 0, + minCropBoxWidth: 0, + minCropBoxHeight: 0, + minContainerWidth: 200, + minContainerHeight: 100, + + // Shortcuts of events + ready: null, + cropstart: null, + cropmove: null, + cropend: null, + crop: null, + zoom: null +}; + +var TEMPLATE = '
' + '
' + '
' + '
' + '
' + '
' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '
' + '
'; + +var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { + return typeof obj; +} : function (obj) { + return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; +}; + +var classCallCheck = function (instance, Constructor) { + if (!(instance instanceof Constructor)) { + throw new TypeError("Cannot call a class as a function"); + } +}; + +var createClass = function () { + function defineProperties(target, props) { + for (var i = 0; i < props.length; i++) { + var descriptor = props[i]; + descriptor.enumerable = descriptor.enumerable || false; + descriptor.configurable = true; + if ("value" in descriptor) descriptor.writable = true; + Object.defineProperty(target, descriptor.key, descriptor); + } + } + + return function (Constructor, protoProps, staticProps) { + if (protoProps) defineProperties(Constructor.prototype, protoProps); + if (staticProps) defineProperties(Constructor, staticProps); + return Constructor; + }; +}(); + +var toConsumableArray = function (arr) { + if (Array.isArray(arr)) { + for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) arr2[i] = arr[i]; + + return arr2; + } else { + return Array.from(arr); + } +}; + +/** + * Check if the given value is not a number. + */ +var isNaN = Number.isNaN || WINDOW.isNaN; + +/** + * Check if the given value is a number. + * @param {*} value - The value to check. + * @returns {boolean} Returns `true` if the given value is a number, else `false`. + */ +function isNumber(value) { + return typeof value === 'number' && !isNaN(value); +} + +/** + * Check if the given value is undefined. + * @param {*} value - The value to check. + * @returns {boolean} Returns `true` if the given value is undefined, else `false`. + */ +function isUndefined(value) { + return typeof value === 'undefined'; +} + +/** + * Check if the given value is an object. + * @param {*} value - The value to check. 
+ * @returns {boolean} Returns `true` if the given value is an object, else `false`. + */ +function isObject(value) { + return (typeof value === 'undefined' ? 'undefined' : _typeof(value)) === 'object' && value !== null; +} + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +/** + * Check if the given value is a plain object. + * @param {*} value - The value to check. + * @returns {boolean} Returns `true` if the given value is a plain object, else `false`. + */ + +function isPlainObject(value) { + if (!isObject(value)) { + return false; + } + + try { + var _constructor = value.constructor; + var prototype = _constructor.prototype; + + + return _constructor && prototype && hasOwnProperty.call(prototype, 'isPrototypeOf'); + } catch (e) { + return false; + } +} + +/** + * Check if the given value is a function. + * @param {*} value - The value to check. + * @returns {boolean} Returns `true` if the given value is a function, else `false`. + */ +function isFunction(value) { + return typeof value === 'function'; +} + +/** + * Iterate the given data. + * @param {*} data - The data to iterate. + * @param {Function} callback - The process function for each element. + * @returns {*} The original data. + */ +function forEach(data, callback) { + if (data && isFunction(callback)) { + if (Array.isArray(data) || isNumber(data.length) /* array-like */) { + var length = data.length; + + var i = void 0; + + for (i = 0; i < length; i += 1) { + if (callback.call(data, data[i], i, data) === false) { + break; + } + } + } else if (isObject(data)) { + Object.keys(data).forEach(function (key) { + callback.call(data, data[key], key, data); + }); + } + } + + return data; +} + +/** + * Extend the given object. + * @param {*} obj - The object to be extended. + * @param {*} args - The rest objects which will be merged to the first object. + * @returns {Object} The extended object. 
+ */ +var assign = Object.assign || function assign(obj) { + for (var _len = arguments.length, args = Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) { + args[_key - 1] = arguments[_key]; + } + + if (isObject(obj) && args.length > 0) { + args.forEach(function (arg) { + if (isObject(arg)) { + Object.keys(arg).forEach(function (key) { + obj[key] = arg[key]; + }); + } + }); + } + + return obj; +}; + +var REGEXP_DECIMALS = /\.\d*(?:0|9){12}\d*$/i; + +/** + * Normalize decimal number. + * Check out {@link http://0.30000000000000004.com/} + * @param {number} value - The value to normalize. + * @param {number} [times=100000000000] - The times for normalizing. + * @returns {number} Returns the normalized number. + */ +function normalizeDecimalNumber(value) { + var times = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 100000000000; + + return REGEXP_DECIMALS.test(value) ? Math.round(value * times) / times : value; +} + +var REGEXP_SUFFIX = /^(?:width|height|left|top|marginLeft|marginTop)$/; + +/** + * Apply styles to the given element. + * @param {Element} element - The target element. + * @param {Object} styles - The styles for applying. + */ +function setStyle(element, styles) { + var style = element.style; + + + forEach(styles, function (value, property) { + if (REGEXP_SUFFIX.test(property) && isNumber(value)) { + value += 'px'; + } + + style[property] = value; + }); +} + +/** + * Check if the given element has a special class. + * @param {Element} element - The element to check. + * @param {string} value - The class to search. + * @returns {boolean} Returns `true` if the special class was found. + */ +function hasClass(element, value) { + return element.classList ? element.classList.contains(value) : element.className.indexOf(value) > -1; +} + +/** + * Add classes to the given element. + * @param {Element} element - The target element. + * @param {string} value - The classes to be added. 
+ */ +function addClass(element, value) { + if (!value) { + return; + } + + if (isNumber(element.length)) { + forEach(element, function (elem) { + addClass(elem, value); + }); + return; + } + + if (element.classList) { + element.classList.add(value); + return; + } + + var className = element.className.trim(); + + if (!className) { + element.className = value; + } else if (className.indexOf(value) < 0) { + element.className = className + ' ' + value; + } +} + +/** + * Remove classes from the given element. + * @param {Element} element - The target element. + * @param {string} value - The classes to be removed. + */ +function removeClass(element, value) { + if (!value) { + return; + } + + if (isNumber(element.length)) { + forEach(element, function (elem) { + removeClass(elem, value); + }); + return; + } + + if (element.classList) { + element.classList.remove(value); + return; + } + + if (element.className.indexOf(value) >= 0) { + element.className = element.className.replace(value, ''); + } +} + +/** + * Add or remove classes from the given element. + * @param {Element} element - The target element. + * @param {string} value - The classes to be toggled. + * @param {boolean} added - Add only. + */ +function toggleClass(element, value, added) { + if (!value) { + return; + } + + if (isNumber(element.length)) { + forEach(element, function (elem) { + toggleClass(elem, value, added); + }); + return; + } + + // IE10-11 doesn't support the second parameter of `classList.toggle` + if (added) { + addClass(element, value); + } else { + removeClass(element, value); + } +} + +var REGEXP_HYPHENATE = /([a-z\d])([A-Z])/g; + +/** + * Transform the given string from camelCase to kebab-case + * @param {string} value - The value to transform. + * @returns {string} The transformed value. + */ +function hyphenate(value) { + return value.replace(REGEXP_HYPHENATE, '$1-$2').toLowerCase(); +} + +/** + * Get data from the given element. + * @param {Element} element - The target element. 
+ * @param {string} name - The data key to get. + * @returns {string} The data value. + */ +function getData(element, name) { + if (isObject(element[name])) { + return element[name]; + } else if (element.dataset) { + return element.dataset[name]; + } + + return element.getAttribute('data-' + hyphenate(name)); +} + +/** + * Set data to the given element. + * @param {Element} element - The target element. + * @param {string} name - The data key to set. + * @param {string} data - The data value. + */ +function setData(element, name, data) { + if (isObject(data)) { + element[name] = data; + } else if (element.dataset) { + element.dataset[name] = data; + } else { + element.setAttribute('data-' + hyphenate(name), data); + } +} + +/** + * Remove data from the given element. + * @param {Element} element - The target element. + * @param {string} name - The data key to remove. + */ +function removeData(element, name) { + if (isObject(element[name])) { + try { + delete element[name]; + } catch (e) { + element[name] = undefined; + } + } else if (element.dataset) { + // #128 Safari not allows to delete dataset property + try { + delete element.dataset[name]; + } catch (e) { + element.dataset[name] = undefined; + } + } else { + element.removeAttribute('data-' + hyphenate(name)); + } +} + +var REGEXP_SPACES = /\s\s*/; +var onceSupported = function () { + var supported = false; + + if (IN_BROWSER) { + var once = false; + var listener = function listener() {}; + var options = Object.defineProperty({}, 'once', { + get: function get$$1() { + supported = true; + return once; + }, + + + /** + * This setter can fix a `TypeError` in strict mode + * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Getter_only} + * @param {boolean} value - The value to set + */ + set: function set$$1(value) { + once = value; + } + }); + + WINDOW.addEventListener('test', listener, options); + WINDOW.removeEventListener('test', listener, options); + } + + return supported; 
+}(); + +/** + * Remove event listener from the target element. + * @param {Element} element - The event target. + * @param {string} type - The event type(s). + * @param {Function} listener - The event listener. + * @param {Object} options - The event options. + */ +function removeListener(element, type, listener) { + var options = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {}; + + var handler = listener; + + type.trim().split(REGEXP_SPACES).forEach(function (event) { + if (!onceSupported) { + var listeners = element.listeners; + + + if (listeners && listeners[event] && listeners[event][listener]) { + handler = listeners[event][listener]; + delete listeners[event][listener]; + + if (Object.keys(listeners[event]).length === 0) { + delete listeners[event]; + } + + if (Object.keys(listeners).length === 0) { + delete element.listeners; + } + } + } + + element.removeEventListener(event, handler, options); + }); +} + +/** + * Add event listener to the target element. + * @param {Element} element - The event target. + * @param {string} type - The event type(s). + * @param {Function} listener - The event listener. + * @param {Object} options - The event options. + */ +function addListener(element, type, listener) { + var options = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {}; + + var _handler = listener; + + type.trim().split(REGEXP_SPACES).forEach(function (event) { + if (options.once && !onceSupported) { + var _element$listeners = element.listeners, + listeners = _element$listeners === undefined ? 
{} : _element$listeners; + + + _handler = function handler() { + for (var _len2 = arguments.length, args = Array(_len2), _key2 = 0; _key2 < _len2; _key2++) { + args[_key2] = arguments[_key2]; + } + + delete listeners[event][listener]; + element.removeEventListener(event, _handler, options); + listener.apply(element, args); + }; + + if (!listeners[event]) { + listeners[event] = {}; + } + + if (listeners[event][listener]) { + element.removeEventListener(event, listeners[event][listener], options); + } + + listeners[event][listener] = _handler; + element.listeners = listeners; + } + + element.addEventListener(event, _handler, options); + }); +} + +/** + * Dispatch event on the target element. + * @param {Element} element - The event target. + * @param {string} type - The event type(s). + * @param {Object} data - The additional event data. + * @returns {boolean} Indicate if the event is default prevented or not. + */ +function dispatchEvent(element, type, data) { + var event = void 0; + + // Event and CustomEvent on IE9-11 are global objects, not constructors + if (isFunction(Event) && isFunction(CustomEvent)) { + event = new CustomEvent(type, { + detail: data, + bubbles: true, + cancelable: true + }); + } else { + event = document.createEvent('CustomEvent'); + event.initCustomEvent(type, true, true, data); + } + + return element.dispatchEvent(event); +} + +/** + * Get the offset base on the document. + * @param {Element} element - The target element. + * @returns {Object} The offset data. + */ +function getOffset(element) { + var box = element.getBoundingClientRect(); + + return { + left: box.left + (window.pageXOffset - document.documentElement.clientLeft), + top: box.top + (window.pageYOffset - document.documentElement.clientTop) + }; +} + +var location = WINDOW.location; + +var REGEXP_ORIGINS = /^(https?:)\/\/([^:/?#]+):?(\d*)/i; + +/** + * Check if the given URL is a cross origin URL. + * @param {string} url - The target URL. 
+ * @returns {boolean} Returns `true` if the given URL is a cross origin URL, else `false`. + */ +function isCrossOriginURL(url) { + var parts = url.match(REGEXP_ORIGINS); + + return parts && (parts[1] !== location.protocol || parts[2] !== location.hostname || parts[3] !== location.port); +} + +/** + * Add timestamp to the given URL. + * @param {string} url - The target URL. + * @returns {string} The result URL. + */ +function addTimestamp(url) { + var timestamp = 'timestamp=' + new Date().getTime(); + + return url + (url.indexOf('?') === -1 ? '?' : '&') + timestamp; +} + +/** + * Get transforms base on the given object. + * @param {Object} obj - The target object. + * @returns {string} A string contains transform values. + */ +function getTransforms(_ref) { + var rotate = _ref.rotate, + scaleX = _ref.scaleX, + scaleY = _ref.scaleY, + translateX = _ref.translateX, + translateY = _ref.translateY; + + var values = []; + + if (isNumber(translateX) && translateX !== 0) { + values.push('translateX(' + translateX + 'px)'); + } + + if (isNumber(translateY) && translateY !== 0) { + values.push('translateY(' + translateY + 'px)'); + } + + // Rotate should come first before scale to match orientation transform + if (isNumber(rotate) && rotate !== 0) { + values.push('rotate(' + rotate + 'deg)'); + } + + if (isNumber(scaleX) && scaleX !== 1) { + values.push('scaleX(' + scaleX + ')'); + } + + if (isNumber(scaleY) && scaleY !== 1) { + values.push('scaleY(' + scaleY + ')'); + } + + var transform = values.length ? values.join(' ') : 'none'; + + return { + WebkitTransform: transform, + msTransform: transform, + transform: transform + }; +} + +/** + * Get the max ratio of a group of pointers. + * @param {string} pointers - The target pointers. + * @returns {number} The result ratio. 
+ */ +function getMaxZoomRatio(pointers) { + var pointers2 = assign({}, pointers); + var ratios = []; + + forEach(pointers, function (pointer, pointerId) { + delete pointers2[pointerId]; + + forEach(pointers2, function (pointer2) { + var x1 = Math.abs(pointer.startX - pointer2.startX); + var y1 = Math.abs(pointer.startY - pointer2.startY); + var x2 = Math.abs(pointer.endX - pointer2.endX); + var y2 = Math.abs(pointer.endY - pointer2.endY); + var z1 = Math.sqrt(x1 * x1 + y1 * y1); + var z2 = Math.sqrt(x2 * x2 + y2 * y2); + var ratio = (z2 - z1) / z1; + + ratios.push(ratio); + }); + }); + + ratios.sort(function (a, b) { + return Math.abs(a) < Math.abs(b); + }); + + return ratios[0]; +} + +/** + * Get a pointer from an event object. + * @param {Object} event - The target event object. + * @param {boolean} endOnly - Indicates if only returns the end point coordinate or not. + * @returns {Object} The result pointer contains start and/or end point coordinates. + */ +function getPointer(_ref2, endOnly) { + var pageX = _ref2.pageX, + pageY = _ref2.pageY; + + var end = { + endX: pageX, + endY: pageY + }; + + return endOnly ? end : assign({ + startX: pageX, + startY: pageY + }, end); +} + +/** + * Get the center point coordinate of a group of pointers. + * @param {Object} pointers - The target pointers. + * @returns {Object} The center point coordinate. + */ +function getPointersCenter(pointers) { + var pageX = 0; + var pageY = 0; + var count = 0; + + forEach(pointers, function (_ref3) { + var startX = _ref3.startX, + startY = _ref3.startY; + + pageX += startX; + pageY += startY; + count += 1; + }); + + pageX /= count; + pageY /= count; + + return { + pageX: pageX, + pageY: pageY + }; +} + +/** + * Check if the given value is a finite number. + */ +var isFinite = Number.isFinite || WINDOW.isFinite; + +/** + * Get the max sizes in a rectangle under the given aspect ratio. + * @param {Object} data - The original sizes. + * @param {string} [type='contain'] - The adjust type. 
+ * @returns {Object} The result sizes. + */ +function getAdjustedSizes(_ref4) // or 'cover' +{ + var aspectRatio = _ref4.aspectRatio, + height = _ref4.height, + width = _ref4.width; + var type = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'contain'; + + var isValidNumber = function isValidNumber(value) { + return isFinite(value) && value > 0; + }; + + if (isValidNumber(width) && isValidNumber(height)) { + var adjustedWidth = height * aspectRatio; + + if (type === 'contain' && adjustedWidth > width || type === 'cover' && adjustedWidth < width) { + height = width / aspectRatio; + } else { + width = height * aspectRatio; + } + } else if (isValidNumber(width)) { + height = width / aspectRatio; + } else if (isValidNumber(height)) { + width = height * aspectRatio; + } + + return { + width: width, + height: height + }; +} + +/** + * Get the new sizes of a rectangle after rotated. + * @param {Object} data - The original sizes. + * @returns {Object} The result sizes. + */ +function getRotatedSizes(_ref5) { + var width = _ref5.width, + height = _ref5.height, + degree = _ref5.degree; + + degree = Math.abs(degree) % 180; + + if (degree === 90) { + return { + width: height, + height: width + }; + } + + var arc = degree % 90 * Math.PI / 180; + var sinArc = Math.sin(arc); + var cosArc = Math.cos(arc); + var newWidth = width * cosArc + height * sinArc; + var newHeight = width * sinArc + height * cosArc; + + return degree > 90 ? { + width: newHeight, + height: newWidth + } : { + width: newWidth, + height: newHeight + }; +} + +/** + * Get a canvas which drew the given image. + * @param {HTMLImageElement} image - The image for drawing. + * @param {Object} imageData - The image data. + * @param {Object} canvasData - The canvas data. + * @param {Object} options - The options. + * @returns {HTMLCanvasElement} The result canvas. 
+ */ +function getSourceCanvas(image, _ref6, _ref7, _ref8) { + var imageAspectRatio = _ref6.aspectRatio, + imageNaturalWidth = _ref6.naturalWidth, + imageNaturalHeight = _ref6.naturalHeight, + _ref6$rotate = _ref6.rotate, + rotate = _ref6$rotate === undefined ? 0 : _ref6$rotate, + _ref6$scaleX = _ref6.scaleX, + scaleX = _ref6$scaleX === undefined ? 1 : _ref6$scaleX, + _ref6$scaleY = _ref6.scaleY, + scaleY = _ref6$scaleY === undefined ? 1 : _ref6$scaleY; + var aspectRatio = _ref7.aspectRatio, + naturalWidth = _ref7.naturalWidth, + naturalHeight = _ref7.naturalHeight; + var _ref8$fillColor = _ref8.fillColor, + fillColor = _ref8$fillColor === undefined ? 'transparent' : _ref8$fillColor, + _ref8$imageSmoothingE = _ref8.imageSmoothingEnabled, + imageSmoothingEnabled = _ref8$imageSmoothingE === undefined ? true : _ref8$imageSmoothingE, + _ref8$imageSmoothingQ = _ref8.imageSmoothingQuality, + imageSmoothingQuality = _ref8$imageSmoothingQ === undefined ? 'low' : _ref8$imageSmoothingQ, + _ref8$maxWidth = _ref8.maxWidth, + maxWidth = _ref8$maxWidth === undefined ? Infinity : _ref8$maxWidth, + _ref8$maxHeight = _ref8.maxHeight, + maxHeight = _ref8$maxHeight === undefined ? Infinity : _ref8$maxHeight, + _ref8$minWidth = _ref8.minWidth, + minWidth = _ref8$minWidth === undefined ? 0 : _ref8$minWidth, + _ref8$minHeight = _ref8.minHeight, + minHeight = _ref8$minHeight === undefined ? 
0 : _ref8$minHeight; + + var canvas = document.createElement('canvas'); + var context = canvas.getContext('2d'); + var maxSizes = getAdjustedSizes({ + aspectRatio: aspectRatio, + width: maxWidth, + height: maxHeight + }); + var minSizes = getAdjustedSizes({ + aspectRatio: aspectRatio, + width: minWidth, + height: minHeight + }, 'cover'); + var width = Math.min(maxSizes.width, Math.max(minSizes.width, naturalWidth)); + var height = Math.min(maxSizes.height, Math.max(minSizes.height, naturalHeight)); + + // Note: should always use image's natural sizes for drawing as + // imageData.naturalWidth === canvasData.naturalHeight when rotate % 180 === 90 + var destMaxSizes = getAdjustedSizes({ + aspectRatio: imageAspectRatio, + width: maxWidth, + height: maxHeight + }); + var destMinSizes = getAdjustedSizes({ + aspectRatio: imageAspectRatio, + width: minWidth, + height: minHeight + }, 'cover'); + var destWidth = Math.min(destMaxSizes.width, Math.max(destMinSizes.width, imageNaturalWidth)); + var destHeight = Math.min(destMaxSizes.height, Math.max(destMinSizes.height, imageNaturalHeight)); + var params = [-destWidth / 2, -destHeight / 2, destWidth, destHeight]; + + canvas.width = normalizeDecimalNumber(width); + canvas.height = normalizeDecimalNumber(height); + context.fillStyle = fillColor; + context.fillRect(0, 0, width, height); + context.save(); + context.translate(width / 2, height / 2); + context.rotate(rotate * Math.PI / 180); + context.scale(scaleX, scaleY); + context.imageSmoothingEnabled = imageSmoothingEnabled; + context.imageSmoothingQuality = imageSmoothingQuality; + context.drawImage.apply(context, [image].concat(toConsumableArray(params.map(function (param) { + return Math.floor(normalizeDecimalNumber(param)); + })))); + context.restore(); + return canvas; +} + +var fromCharCode = String.fromCharCode; + +/** + * Get string from char code in data view. + * @param {DataView} dataView - The data view for read. + * @param {number} start - The start index. 
+ * @param {number} length - The read length. + * @returns {string} The read result. + */ + +function getStringFromCharCode(dataView, start, length) { + var str = ''; + var i = void 0; + + length += start; + + for (i = start; i < length; i += 1) { + str += fromCharCode(dataView.getUint8(i)); + } + + return str; +} + +var REGEXP_DATA_URL_HEAD = /^data:.*,/; + +/** + * Transform Data URL to array buffer. + * @param {string} dataURL - The Data URL to transform. + * @returns {ArrayBuffer} The result array buffer. + */ +function dataURLToArrayBuffer(dataURL) { + var base64 = dataURL.replace(REGEXP_DATA_URL_HEAD, ''); + var binary = atob(base64); + var arrayBuffer = new ArrayBuffer(binary.length); + var uint8 = new Uint8Array(arrayBuffer); + + forEach(uint8, function (value, i) { + uint8[i] = binary.charCodeAt(i); + }); + + return arrayBuffer; +} + +/** + * Transform array buffer to Data URL. + * @param {ArrayBuffer} arrayBuffer - The array buffer to transform. + * @param {string} mimeType - The mime type of the Data URL. + * @returns {string} The result Data URL. + */ +function arrayBufferToDataURL(arrayBuffer, mimeType) { + var uint8 = new Uint8Array(arrayBuffer); + var data = ''; + + // TypedArray.prototype.forEach is not supported in some browsers. + forEach(uint8, function (value) { + data += fromCharCode(value); + }); + + return 'data:' + mimeType + ';base64,' + btoa(data); +} + +/** + * Get orientation value from given array buffer. + * @param {ArrayBuffer} arrayBuffer - The array buffer to read. + * @returns {number} The read orientation value. 
+ */ +function getOrientation(arrayBuffer) { + var dataView = new DataView(arrayBuffer); + var orientation = void 0; + var littleEndian = void 0; + var app1Start = void 0; + var ifdStart = void 0; + + // Only handle JPEG image (start by 0xFFD8) + if (dataView.getUint8(0) === 0xFF && dataView.getUint8(1) === 0xD8) { + var length = dataView.byteLength; + var offset = 2; + + while (offset < length) { + if (dataView.getUint8(offset) === 0xFF && dataView.getUint8(offset + 1) === 0xE1) { + app1Start = offset; + break; + } + + offset += 1; + } + } + + if (app1Start) { + var exifIDCode = app1Start + 4; + var tiffOffset = app1Start + 10; + + if (getStringFromCharCode(dataView, exifIDCode, 4) === 'Exif') { + var endianness = dataView.getUint16(tiffOffset); + + littleEndian = endianness === 0x4949; + + if (littleEndian || endianness === 0x4D4D /* bigEndian */) { + if (dataView.getUint16(tiffOffset + 2, littleEndian) === 0x002A) { + var firstIFDOffset = dataView.getUint32(tiffOffset + 4, littleEndian); + + if (firstIFDOffset >= 0x00000008) { + ifdStart = tiffOffset + firstIFDOffset; + } + } + } + } + } + + if (ifdStart) { + var _length = dataView.getUint16(ifdStart, littleEndian); + var _offset = void 0; + var i = void 0; + + for (i = 0; i < _length; i += 1) { + _offset = ifdStart + i * 12 + 2; + + if (dataView.getUint16(_offset, littleEndian) === 0x0112 /* Orientation */) { + // 8 is the offset of the current tag's value + _offset += 8; + + // Get the original orientation value + orientation = dataView.getUint16(_offset, littleEndian); + + // Override the orientation with its default value + dataView.setUint16(_offset, 1, littleEndian); + break; + } + } + } + + return orientation; +} + +/** + * Parse Exif Orientation value. + * @param {number} orientation - The orientation to parse. + * @returns {Object} The parsed result. 
+ */ +function parseOrientation(orientation) { + var rotate = 0; + var scaleX = 1; + var scaleY = 1; + + switch (orientation) { + // Flip horizontal + case 2: + scaleX = -1; + break; + + // Rotate left 180° + case 3: + rotate = -180; + break; + + // Flip vertical + case 4: + scaleY = -1; + break; + + // Flip vertical and rotate right 90° + case 5: + rotate = 90; + scaleY = -1; + break; + + // Rotate right 90° + case 6: + rotate = 90; + break; + + // Flip horizontal and rotate right 90° + case 7: + rotate = 90; + scaleX = -1; + break; + + // Rotate left 90° + case 8: + rotate = -90; + break; + + default: + } + + return { + rotate: rotate, + scaleX: scaleX, + scaleY: scaleY + }; +} + +var render = { + render: function render() { + this.initContainer(); + this.initCanvas(); + this.initCropBox(); + this.renderCanvas(); + + if (this.cropped) { + this.renderCropBox(); + } + }, + initContainer: function initContainer() { + var element = this.element, + options = this.options, + container = this.container, + cropper = this.cropper; + + + addClass(cropper, CLASS_HIDDEN); + removeClass(element, CLASS_HIDDEN); + + var containerData = { + width: Math.max(container.offsetWidth, Number(options.minContainerWidth) || 200), + height: Math.max(container.offsetHeight, Number(options.minContainerHeight) || 100) + }; + + this.containerData = containerData; + + setStyle(cropper, { + width: containerData.width, + height: containerData.height + }); + + addClass(element, CLASS_HIDDEN); + removeClass(cropper, CLASS_HIDDEN); + }, + + + // Canvas (image wrapper) + initCanvas: function initCanvas() { + var containerData = this.containerData, + imageData = this.imageData; + var viewMode = this.options.viewMode; + + var rotated = Math.abs(imageData.rotate) % 180 === 90; + var naturalWidth = rotated ? imageData.naturalHeight : imageData.naturalWidth; + var naturalHeight = rotated ? 
imageData.naturalWidth : imageData.naturalHeight; + var aspectRatio = naturalWidth / naturalHeight; + var canvasWidth = containerData.width; + var canvasHeight = containerData.height; + + if (containerData.height * aspectRatio > containerData.width) { + if (viewMode === 3) { + canvasWidth = containerData.height * aspectRatio; + } else { + canvasHeight = containerData.width / aspectRatio; + } + } else if (viewMode === 3) { + canvasHeight = containerData.width / aspectRatio; + } else { + canvasWidth = containerData.height * aspectRatio; + } + + var canvasData = { + aspectRatio: aspectRatio, + naturalWidth: naturalWidth, + naturalHeight: naturalHeight, + width: canvasWidth, + height: canvasHeight + }; + + canvasData.left = (containerData.width - canvasWidth) / 2; + canvasData.top = (containerData.height - canvasHeight) / 2; + canvasData.oldLeft = canvasData.left; + canvasData.oldTop = canvasData.top; + + this.canvasData = canvasData; + this.limited = viewMode === 1 || viewMode === 2; + this.limitCanvas(true, true); + this.initialImageData = assign({}, imageData); + this.initialCanvasData = assign({}, canvasData); + }, + limitCanvas: function limitCanvas(sizeLimited, positionLimited) { + var options = this.options, + containerData = this.containerData, + canvasData = this.canvasData, + cropBoxData = this.cropBoxData; + var viewMode = options.viewMode; + var aspectRatio = canvasData.aspectRatio; + + var cropped = this.cropped && cropBoxData; + + if (sizeLimited) { + var minCanvasWidth = Number(options.minCanvasWidth) || 0; + var minCanvasHeight = Number(options.minCanvasHeight) || 0; + + if (viewMode > 1) { + minCanvasWidth = Math.max(minCanvasWidth, containerData.width); + minCanvasHeight = Math.max(minCanvasHeight, containerData.height); + + if (viewMode === 3) { + if (minCanvasHeight * aspectRatio > minCanvasWidth) { + minCanvasWidth = minCanvasHeight * aspectRatio; + } else { + minCanvasHeight = minCanvasWidth / aspectRatio; + } + } + } else if (viewMode > 0) { + if 
(minCanvasWidth) { + minCanvasWidth = Math.max(minCanvasWidth, cropped ? cropBoxData.width : 0); + } else if (minCanvasHeight) { + minCanvasHeight = Math.max(minCanvasHeight, cropped ? cropBoxData.height : 0); + } else if (cropped) { + minCanvasWidth = cropBoxData.width; + minCanvasHeight = cropBoxData.height; + + if (minCanvasHeight * aspectRatio > minCanvasWidth) { + minCanvasWidth = minCanvasHeight * aspectRatio; + } else { + minCanvasHeight = minCanvasWidth / aspectRatio; + } + } + } + + var _getAdjustedSizes = getAdjustedSizes({ + aspectRatio: aspectRatio, + width: minCanvasWidth, + height: minCanvasHeight + }); + + minCanvasWidth = _getAdjustedSizes.width; + minCanvasHeight = _getAdjustedSizes.height; + + + canvasData.minWidth = minCanvasWidth; + canvasData.minHeight = minCanvasHeight; + canvasData.maxWidth = Infinity; + canvasData.maxHeight = Infinity; + } + + if (positionLimited) { + if (viewMode) { + var newCanvasLeft = containerData.width - canvasData.width; + var newCanvasTop = containerData.height - canvasData.height; + + canvasData.minLeft = Math.min(0, newCanvasLeft); + canvasData.minTop = Math.min(0, newCanvasTop); + canvasData.maxLeft = Math.max(0, newCanvasLeft); + canvasData.maxTop = Math.max(0, newCanvasTop); + + if (cropped && this.limited) { + canvasData.minLeft = Math.min(cropBoxData.left, cropBoxData.left + (cropBoxData.width - canvasData.width)); + canvasData.minTop = Math.min(cropBoxData.top, cropBoxData.top + (cropBoxData.height - canvasData.height)); + canvasData.maxLeft = cropBoxData.left; + canvasData.maxTop = cropBoxData.top; + + if (viewMode === 2) { + if (canvasData.width >= containerData.width) { + canvasData.minLeft = Math.min(0, newCanvasLeft); + canvasData.maxLeft = Math.max(0, newCanvasLeft); + } + + if (canvasData.height >= containerData.height) { + canvasData.minTop = Math.min(0, newCanvasTop); + canvasData.maxTop = Math.max(0, newCanvasTop); + } + } + } + } else { + canvasData.minLeft = -canvasData.width; + canvasData.minTop 
= -canvasData.height; + canvasData.maxLeft = containerData.width; + canvasData.maxTop = containerData.height; + } + } + }, + renderCanvas: function renderCanvas(changed, transformed) { + var canvasData = this.canvasData, + imageData = this.imageData; + + + if (transformed) { + var _getRotatedSizes = getRotatedSizes({ + width: imageData.naturalWidth * Math.abs(imageData.scaleX || 1), + height: imageData.naturalHeight * Math.abs(imageData.scaleY || 1), + degree: imageData.rotate || 0 + }), + naturalWidth = _getRotatedSizes.width, + naturalHeight = _getRotatedSizes.height; + + var width = canvasData.width * (naturalWidth / canvasData.naturalWidth); + var height = canvasData.height * (naturalHeight / canvasData.naturalHeight); + + canvasData.left -= (width - canvasData.width) / 2; + canvasData.top -= (height - canvasData.height) / 2; + canvasData.width = width; + canvasData.height = height; + canvasData.aspectRatio = naturalWidth / naturalHeight; + canvasData.naturalWidth = naturalWidth; + canvasData.naturalHeight = naturalHeight; + this.limitCanvas(true, false); + } + + if (canvasData.width > canvasData.maxWidth || canvasData.width < canvasData.minWidth) { + canvasData.left = canvasData.oldLeft; + } + + if (canvasData.height > canvasData.maxHeight || canvasData.height < canvasData.minHeight) { + canvasData.top = canvasData.oldTop; + } + + canvasData.width = Math.min(Math.max(canvasData.width, canvasData.minWidth), canvasData.maxWidth); + canvasData.height = Math.min(Math.max(canvasData.height, canvasData.minHeight), canvasData.maxHeight); + + this.limitCanvas(false, true); + + canvasData.left = Math.min(Math.max(canvasData.left, canvasData.minLeft), canvasData.maxLeft); + canvasData.top = Math.min(Math.max(canvasData.top, canvasData.minTop), canvasData.maxTop); + canvasData.oldLeft = canvasData.left; + canvasData.oldTop = canvasData.top; + + setStyle(this.canvas, assign({ + width: canvasData.width, + height: canvasData.height + }, getTransforms({ + translateX: 
canvasData.left, + translateY: canvasData.top + }))); + + this.renderImage(changed); + + if (this.cropped && this.limited) { + this.limitCropBox(true, true); + } + }, + renderImage: function renderImage(changed) { + var canvasData = this.canvasData, + imageData = this.imageData; + + var width = imageData.naturalWidth * (canvasData.width / canvasData.naturalWidth); + var height = imageData.naturalHeight * (canvasData.height / canvasData.naturalHeight); + + assign(imageData, { + width: width, + height: height, + left: (canvasData.width - width) / 2, + top: (canvasData.height - height) / 2 + }); + setStyle(this.image, assign({ + width: imageData.width, + height: imageData.height + }, getTransforms(assign({ + translateX: imageData.left, + translateY: imageData.top + }, imageData)))); + + if (changed) { + this.output(); + } + }, + initCropBox: function initCropBox() { + var options = this.options, + canvasData = this.canvasData; + var aspectRatio = options.aspectRatio; + + var autoCropArea = Number(options.autoCropArea) || 0.8; + var cropBoxData = { + width: canvasData.width, + height: canvasData.height + }; + + if (aspectRatio) { + if (canvasData.height * aspectRatio > canvasData.width) { + cropBoxData.height = cropBoxData.width / aspectRatio; + } else { + cropBoxData.width = cropBoxData.height * aspectRatio; + } + } + + this.cropBoxData = cropBoxData; + this.limitCropBox(true, true); + + // Initialize auto crop area + cropBoxData.width = Math.min(Math.max(cropBoxData.width, cropBoxData.minWidth), cropBoxData.maxWidth); + cropBoxData.height = Math.min(Math.max(cropBoxData.height, cropBoxData.minHeight), cropBoxData.maxHeight); + + // The width/height of auto crop area must large than "minWidth/Height" + cropBoxData.width = Math.max(cropBoxData.minWidth, cropBoxData.width * autoCropArea); + cropBoxData.height = Math.max(cropBoxData.minHeight, cropBoxData.height * autoCropArea); + cropBoxData.left = canvasData.left + (canvasData.width - cropBoxData.width) / 2; + 
cropBoxData.top = canvasData.top + (canvasData.height - cropBoxData.height) / 2; + cropBoxData.oldLeft = cropBoxData.left; + cropBoxData.oldTop = cropBoxData.top; + + this.initialCropBoxData = assign({}, cropBoxData); + }, + limitCropBox: function limitCropBox(sizeLimited, positionLimited) { + var options = this.options, + containerData = this.containerData, + canvasData = this.canvasData, + cropBoxData = this.cropBoxData, + limited = this.limited; + var aspectRatio = options.aspectRatio; + + + if (sizeLimited) { + var minCropBoxWidth = Number(options.minCropBoxWidth) || 0; + var minCropBoxHeight = Number(options.minCropBoxHeight) || 0; + var maxCropBoxWidth = Math.min(containerData.width, limited ? canvasData.width : containerData.width); + var maxCropBoxHeight = Math.min(containerData.height, limited ? canvasData.height : containerData.height); + + // The min/maxCropBoxWidth/Height must be less than container's width/height + minCropBoxWidth = Math.min(minCropBoxWidth, containerData.width); + minCropBoxHeight = Math.min(minCropBoxHeight, containerData.height); + + if (aspectRatio) { + if (minCropBoxWidth && minCropBoxHeight) { + if (minCropBoxHeight * aspectRatio > minCropBoxWidth) { + minCropBoxHeight = minCropBoxWidth / aspectRatio; + } else { + minCropBoxWidth = minCropBoxHeight * aspectRatio; + } + } else if (minCropBoxWidth) { + minCropBoxHeight = minCropBoxWidth / aspectRatio; + } else if (minCropBoxHeight) { + minCropBoxWidth = minCropBoxHeight * aspectRatio; + } + + if (maxCropBoxHeight * aspectRatio > maxCropBoxWidth) { + maxCropBoxHeight = maxCropBoxWidth / aspectRatio; + } else { + maxCropBoxWidth = maxCropBoxHeight * aspectRatio; + } + } + + // The minWidth/Height must be less than maxWidth/Height + cropBoxData.minWidth = Math.min(minCropBoxWidth, maxCropBoxWidth); + cropBoxData.minHeight = Math.min(minCropBoxHeight, maxCropBoxHeight); + cropBoxData.maxWidth = maxCropBoxWidth; + cropBoxData.maxHeight = maxCropBoxHeight; + } + + if (positionLimited) { 
+ if (limited) { + cropBoxData.minLeft = Math.max(0, canvasData.left); + cropBoxData.minTop = Math.max(0, canvasData.top); + cropBoxData.maxLeft = Math.min(containerData.width, canvasData.left + canvasData.width) - cropBoxData.width; + cropBoxData.maxTop = Math.min(containerData.height, canvasData.top + canvasData.height) - cropBoxData.height; + } else { + cropBoxData.minLeft = 0; + cropBoxData.minTop = 0; + cropBoxData.maxLeft = containerData.width - cropBoxData.width; + cropBoxData.maxTop = containerData.height - cropBoxData.height; + } + } + }, + renderCropBox: function renderCropBox() { + var options = this.options, + containerData = this.containerData, + cropBoxData = this.cropBoxData; + + + if (cropBoxData.width > cropBoxData.maxWidth || cropBoxData.width < cropBoxData.minWidth) { + cropBoxData.left = cropBoxData.oldLeft; + } + + if (cropBoxData.height > cropBoxData.maxHeight || cropBoxData.height < cropBoxData.minHeight) { + cropBoxData.top = cropBoxData.oldTop; + } + + cropBoxData.width = Math.min(Math.max(cropBoxData.width, cropBoxData.minWidth), cropBoxData.maxWidth); + cropBoxData.height = Math.min(Math.max(cropBoxData.height, cropBoxData.minHeight), cropBoxData.maxHeight); + + this.limitCropBox(false, true); + + cropBoxData.left = Math.min(Math.max(cropBoxData.left, cropBoxData.minLeft), cropBoxData.maxLeft); + cropBoxData.top = Math.min(Math.max(cropBoxData.top, cropBoxData.minTop), cropBoxData.maxTop); + cropBoxData.oldLeft = cropBoxData.left; + cropBoxData.oldTop = cropBoxData.top; + + if (options.movable && options.cropBoxMovable) { + // Turn to move the canvas when the crop box is equal to the container + setData(this.face, DATA_ACTION, cropBoxData.width >= containerData.width && cropBoxData.height >= containerData.height ? 
ACTION_MOVE : ACTION_ALL); + } + + setStyle(this.cropBox, assign({ + width: cropBoxData.width, + height: cropBoxData.height + }, getTransforms({ + translateX: cropBoxData.left, + translateY: cropBoxData.top + }))); + + if (this.cropped && this.limited) { + this.limitCanvas(true, true); + } + + if (!this.disabled) { + this.output(); + } + }, + output: function output() { + this.preview(); + dispatchEvent(this.element, EVENT_CROP, this.getData()); + } +}; + +var preview = { + initPreview: function initPreview() { + var crossOrigin = this.crossOrigin; + var preview = this.options.preview; + + var url = crossOrigin ? this.crossOriginUrl : this.url; + var image = document.createElement('img'); + + if (crossOrigin) { + image.crossOrigin = crossOrigin; + } + + image.src = url; + this.viewBox.appendChild(image); + this.viewBoxImage = image; + + if (!preview) { + return; + } + + var previews = preview; + + if (typeof preview === 'string') { + previews = this.element.ownerDocument.querySelectorAll(preview); + } else if (preview.querySelector) { + previews = [preview]; + } + + this.previews = previews; + + forEach(previews, function (el) { + var img = document.createElement('img'); + + // Save the original size for recover + setData(el, DATA_PREVIEW, { + width: el.offsetWidth, + height: el.offsetHeight, + html: el.innerHTML + }); + + if (crossOrigin) { + img.crossOrigin = crossOrigin; + } + + img.src = url; + + /** + * Override img element styles + * Add `display:block` to avoid margin top issue + * Add `height:auto` to override `height` attribute on IE8 + * (Occur only when margin-top <= -height) + */ + img.style.cssText = 'display:block;' + 'width:100%;' + 'height:auto;' + 'min-width:0!important;' + 'min-height:0!important;' + 'max-width:none!important;' + 'max-height:none!important;' + 'image-orientation:0deg!important;"'; + + el.innerHTML = ''; + el.appendChild(img); + }); + }, + resetPreview: function resetPreview() { + forEach(this.previews, function (element) { + var 
data = getData(element, DATA_PREVIEW); + + setStyle(element, { + width: data.width, + height: data.height + }); + + element.innerHTML = data.html; + removeData(element, DATA_PREVIEW); + }); + }, + preview: function preview() { + var imageData = this.imageData, + canvasData = this.canvasData, + cropBoxData = this.cropBoxData; + var cropBoxWidth = cropBoxData.width, + cropBoxHeight = cropBoxData.height; + var width = imageData.width, + height = imageData.height; + + var left = cropBoxData.left - canvasData.left - imageData.left; + var top = cropBoxData.top - canvasData.top - imageData.top; + + if (!this.cropped || this.disabled) { + return; + } + + setStyle(this.viewBoxImage, assign({ + width: width, + height: height + }, getTransforms(assign({ + translateX: -left, + translateY: -top + }, imageData)))); + + forEach(this.previews, function (element) { + var data = getData(element, DATA_PREVIEW); + var originalWidth = data.width; + var originalHeight = data.height; + var newWidth = originalWidth; + var newHeight = originalHeight; + var ratio = 1; + + if (cropBoxWidth) { + ratio = originalWidth / cropBoxWidth; + newHeight = cropBoxHeight * ratio; + } + + if (cropBoxHeight && newHeight > originalHeight) { + ratio = originalHeight / cropBoxHeight; + newWidth = cropBoxWidth * ratio; + newHeight = originalHeight; + } + + setStyle(element, { + width: newWidth, + height: newHeight + }); + + setStyle(element.getElementsByTagName('img')[0], assign({ + width: width * ratio, + height: height * ratio + }, getTransforms(assign({ + translateX: -left * ratio, + translateY: -top * ratio + }, imageData)))); + }); + } +}; + +var events = { + bind: function bind() { + var element = this.element, + options = this.options, + cropper = this.cropper; + + + if (isFunction(options.cropstart)) { + addListener(element, EVENT_CROP_START, options.cropstart); + } + + if (isFunction(options.cropmove)) { + addListener(element, EVENT_CROP_MOVE, options.cropmove); + } + + if 
(isFunction(options.cropend)) { + addListener(element, EVENT_CROP_END, options.cropend); + } + + if (isFunction(options.crop)) { + addListener(element, EVENT_CROP, options.crop); + } + + if (isFunction(options.zoom)) { + addListener(element, EVENT_ZOOM, options.zoom); + } + + addListener(cropper, EVENT_POINTER_DOWN, this.onCropStart = this.cropStart.bind(this)); + + if (options.zoomable && options.zoomOnWheel) { + addListener(cropper, EVENT_WHEEL, this.onWheel = this.wheel.bind(this)); + } + + if (options.toggleDragModeOnDblclick) { + addListener(cropper, EVENT_DBLCLICK, this.onDblclick = this.dblclick.bind(this)); + } + + addListener(element.ownerDocument, EVENT_POINTER_MOVE, this.onCropMove = this.cropMove.bind(this)); + addListener(element.ownerDocument, EVENT_POINTER_UP, this.onCropEnd = this.cropEnd.bind(this)); + + if (options.responsive) { + addListener(window, EVENT_RESIZE, this.onResize = this.resize.bind(this)); + } + }, + unbind: function unbind() { + var element = this.element, + options = this.options, + cropper = this.cropper; + + + if (isFunction(options.cropstart)) { + removeListener(element, EVENT_CROP_START, options.cropstart); + } + + if (isFunction(options.cropmove)) { + removeListener(element, EVENT_CROP_MOVE, options.cropmove); + } + + if (isFunction(options.cropend)) { + removeListener(element, EVENT_CROP_END, options.cropend); + } + + if (isFunction(options.crop)) { + removeListener(element, EVENT_CROP, options.crop); + } + + if (isFunction(options.zoom)) { + removeListener(element, EVENT_ZOOM, options.zoom); + } + + removeListener(cropper, EVENT_POINTER_DOWN, this.onCropStart); + + if (options.zoomable && options.zoomOnWheel) { + removeListener(cropper, EVENT_WHEEL, this.onWheel); + } + + if (options.toggleDragModeOnDblclick) { + removeListener(cropper, EVENT_DBLCLICK, this.onDblclick); + } + + removeListener(element.ownerDocument, EVENT_POINTER_MOVE, this.onCropMove); + removeListener(element.ownerDocument, EVENT_POINTER_UP, 
this.onCropEnd); + + if (options.responsive) { + removeListener(window, EVENT_RESIZE, this.onResize); + } + } +}; + +var handlers = { + resize: function resize() { + var options = this.options, + container = this.container, + containerData = this.containerData; + + var minContainerWidth = Number(options.minContainerWidth) || 200; + var minContainerHeight = Number(options.minContainerHeight) || 100; + + if (this.disabled || containerData.width <= minContainerWidth || containerData.height <= minContainerHeight) { + return; + } + + var ratio = container.offsetWidth / containerData.width; + + // Resize when width changed or height changed + if (ratio !== 1 || container.offsetHeight !== containerData.height) { + var canvasData = void 0; + var cropBoxData = void 0; + + if (options.restore) { + canvasData = this.getCanvasData(); + cropBoxData = this.getCropBoxData(); + } + + this.render(); + + if (options.restore) { + this.setCanvasData(forEach(canvasData, function (n, i) { + canvasData[i] = n * ratio; + })); + this.setCropBoxData(forEach(cropBoxData, function (n, i) { + cropBoxData[i] = n * ratio; + })); + } + } + }, + dblclick: function dblclick() { + if (this.disabled || this.options.dragMode === DRAG_MODE_NONE) { + return; + } + + this.setDragMode(hasClass(this.dragBox, CLASS_CROP) ? DRAG_MODE_MOVE : DRAG_MODE_CROP); + }, + wheel: function wheel(e) { + var _this = this; + + var ratio = Number(this.options.wheelZoomRatio) || 0.1; + var delta = 1; + + if (this.disabled) { + return; + } + + e.preventDefault(); + + // Limit wheel speed to prevent zoom too fast (#21) + if (this.wheeling) { + return; + } + + this.wheeling = true; + + setTimeout(function () { + _this.wheeling = false; + }, 50); + + if (e.deltaY) { + delta = e.deltaY > 0 ? 1 : -1; + } else if (e.wheelDelta) { + delta = -e.wheelDelta / 120; + } else if (e.detail) { + delta = e.detail > 0 ? 
1 : -1; + } + + this.zoom(-delta * ratio, e); + }, + cropStart: function cropStart(e) { + if (this.disabled) { + return; + } + + var options = this.options, + pointers = this.pointers; + + var action = void 0; + + if (e.changedTouches) { + // Handle touch event + forEach(e.changedTouches, function (touch) { + pointers[touch.identifier] = getPointer(touch); + }); + } else { + // Handle mouse event and pointer event + pointers[e.pointerId || 0] = getPointer(e); + } + + if (Object.keys(pointers).length > 1 && options.zoomable && options.zoomOnTouch) { + action = ACTION_ZOOM; + } else { + action = getData(e.target, DATA_ACTION); + } + + if (!REGEXP_ACTIONS.test(action)) { + return; + } + + if (dispatchEvent(this.element, EVENT_CROP_START, { + originalEvent: e, + action: action + }) === false) { + return; + } + + e.preventDefault(); + + this.action = action; + this.cropping = false; + + if (action === ACTION_CROP) { + this.cropping = true; + addClass(this.dragBox, CLASS_MODAL); + } + }, + cropMove: function cropMove(e) { + var action = this.action; + + + if (this.disabled || !action) { + return; + } + + var pointers = this.pointers; + + + e.preventDefault(); + + if (dispatchEvent(this.element, EVENT_CROP_MOVE, { + originalEvent: e, + action: action + }) === false) { + return; + } + + if (e.changedTouches) { + forEach(e.changedTouches, function (touch) { + assign(pointers[touch.identifier], getPointer(touch, true)); + }); + } else { + assign(pointers[e.pointerId || 0], getPointer(e, true)); + } + + this.change(e); + }, + cropEnd: function cropEnd(e) { + if (this.disabled) { + return; + } + + var action = this.action, + pointers = this.pointers; + + + if (e.changedTouches) { + forEach(e.changedTouches, function (touch) { + delete pointers[touch.identifier]; + }); + } else { + delete pointers[e.pointerId || 0]; + } + + if (!action) { + return; + } + + e.preventDefault(); + + if (!Object.keys(pointers).length) { + this.action = ''; + } + + if (this.cropping) { + 
this.cropping = false; + toggleClass(this.dragBox, CLASS_MODAL, this.cropped && this.options.modal); + } + + dispatchEvent(this.element, EVENT_CROP_END, { + originalEvent: e, + action: action + }); + } +}; + +var change = { + change: function change(e) { + var options = this.options, + canvasData = this.canvasData, + containerData = this.containerData, + cropBoxData = this.cropBoxData, + pointers = this.pointers; + var action = this.action; + var aspectRatio = options.aspectRatio; + var left = cropBoxData.left, + top = cropBoxData.top, + width = cropBoxData.width, + height = cropBoxData.height; + + var right = left + width; + var bottom = top + height; + var minLeft = 0; + var minTop = 0; + var maxWidth = containerData.width; + var maxHeight = containerData.height; + var renderable = true; + var offset = void 0; + + // Locking aspect ratio in "free mode" by holding shift key + if (!aspectRatio && e.shiftKey) { + aspectRatio = width && height ? width / height : 1; + } + + if (this.limited) { + minLeft = cropBoxData.minLeft; + minTop = cropBoxData.minTop; + + maxWidth = minLeft + Math.min(containerData.width, canvasData.width, canvasData.left + canvasData.width); + maxHeight = minTop + Math.min(containerData.height, canvasData.height, canvasData.top + canvasData.height); + } + + var pointer = pointers[Object.keys(pointers)[0]]; + var range = { + x: pointer.endX - pointer.startX, + y: pointer.endY - pointer.startY + }; + var check = function check(side) { + switch (side) { + case ACTION_EAST: + if (right + range.x > maxWidth) { + range.x = maxWidth - right; + } + + break; + + case ACTION_WEST: + if (left + range.x < minLeft) { + range.x = minLeft - left; + } + + break; + + case ACTION_NORTH: + if (top + range.y < minTop) { + range.y = minTop - top; + } + + break; + + case ACTION_SOUTH: + if (bottom + range.y > maxHeight) { + range.y = maxHeight - bottom; + } + + break; + + default: + } + }; + + switch (action) { + // Move crop box + case ACTION_ALL: + left += range.x; 
+ top += range.y; + break; + + // Resize crop box + case ACTION_EAST: + if (range.x >= 0 && (right >= maxWidth || aspectRatio && (top <= minTop || bottom >= maxHeight))) { + renderable = false; + break; + } + + check(ACTION_EAST); + width += range.x; + + if (aspectRatio) { + height = width / aspectRatio; + top -= range.x / aspectRatio / 2; + } + + if (width < 0) { + action = ACTION_WEST; + width = 0; + } + + break; + + case ACTION_NORTH: + if (range.y <= 0 && (top <= minTop || aspectRatio && (left <= minLeft || right >= maxWidth))) { + renderable = false; + break; + } + + check(ACTION_NORTH); + height -= range.y; + top += range.y; + + if (aspectRatio) { + width = height * aspectRatio; + left += range.y * aspectRatio / 2; + } + + if (height < 0) { + action = ACTION_SOUTH; + height = 0; + } + + break; + + case ACTION_WEST: + if (range.x <= 0 && (left <= minLeft || aspectRatio && (top <= minTop || bottom >= maxHeight))) { + renderable = false; + break; + } + + check(ACTION_WEST); + width -= range.x; + left += range.x; + + if (aspectRatio) { + height = width / aspectRatio; + top += range.x / aspectRatio / 2; + } + + if (width < 0) { + action = ACTION_EAST; + width = 0; + } + + break; + + case ACTION_SOUTH: + if (range.y >= 0 && (bottom >= maxHeight || aspectRatio && (left <= minLeft || right >= maxWidth))) { + renderable = false; + break; + } + + check(ACTION_SOUTH); + height += range.y; + + if (aspectRatio) { + width = height * aspectRatio; + left -= range.y * aspectRatio / 2; + } + + if (height < 0) { + action = ACTION_NORTH; + height = 0; + } + + break; + + case ACTION_NORTH_EAST: + if (aspectRatio) { + if (range.y <= 0 && (top <= minTop || right >= maxWidth)) { + renderable = false; + break; + } + + check(ACTION_NORTH); + height -= range.y; + top += range.y; + width = height * aspectRatio; + } else { + check(ACTION_NORTH); + check(ACTION_EAST); + + if (range.x >= 0) { + if (right < maxWidth) { + width += range.x; + } else if (range.y <= 0 && top <= minTop) { + 
renderable = false; + } + } else { + width += range.x; + } + + if (range.y <= 0) { + if (top > minTop) { + height -= range.y; + top += range.y; + } + } else { + height -= range.y; + top += range.y; + } + } + + if (width < 0 && height < 0) { + action = ACTION_SOUTH_WEST; + height = 0; + width = 0; + } else if (width < 0) { + action = ACTION_NORTH_WEST; + width = 0; + } else if (height < 0) { + action = ACTION_SOUTH_EAST; + height = 0; + } + + break; + + case ACTION_NORTH_WEST: + if (aspectRatio) { + if (range.y <= 0 && (top <= minTop || left <= minLeft)) { + renderable = false; + break; + } + + check(ACTION_NORTH); + height -= range.y; + top += range.y; + width = height * aspectRatio; + left += range.y * aspectRatio; + } else { + check(ACTION_NORTH); + check(ACTION_WEST); + + if (range.x <= 0) { + if (left > minLeft) { + width -= range.x; + left += range.x; + } else if (range.y <= 0 && top <= minTop) { + renderable = false; + } + } else { + width -= range.x; + left += range.x; + } + + if (range.y <= 0) { + if (top > minTop) { + height -= range.y; + top += range.y; + } + } else { + height -= range.y; + top += range.y; + } + } + + if (width < 0 && height < 0) { + action = ACTION_SOUTH_EAST; + height = 0; + width = 0; + } else if (width < 0) { + action = ACTION_NORTH_EAST; + width = 0; + } else if (height < 0) { + action = ACTION_SOUTH_WEST; + height = 0; + } + + break; + + case ACTION_SOUTH_WEST: + if (aspectRatio) { + if (range.x <= 0 && (left <= minLeft || bottom >= maxHeight)) { + renderable = false; + break; + } + + check(ACTION_WEST); + width -= range.x; + left += range.x; + height = width / aspectRatio; + } else { + check(ACTION_SOUTH); + check(ACTION_WEST); + + if (range.x <= 0) { + if (left > minLeft) { + width -= range.x; + left += range.x; + } else if (range.y >= 0 && bottom >= maxHeight) { + renderable = false; + } + } else { + width -= range.x; + left += range.x; + } + + if (range.y >= 0) { + if (bottom < maxHeight) { + height += range.y; + } + } else { + 
height += range.y; + } + } + + if (width < 0 && height < 0) { + action = ACTION_NORTH_EAST; + height = 0; + width = 0; + } else if (width < 0) { + action = ACTION_SOUTH_EAST; + width = 0; + } else if (height < 0) { + action = ACTION_NORTH_WEST; + height = 0; + } + + break; + + case ACTION_SOUTH_EAST: + if (aspectRatio) { + if (range.x >= 0 && (right >= maxWidth || bottom >= maxHeight)) { + renderable = false; + break; + } + + check(ACTION_EAST); + width += range.x; + height = width / aspectRatio; + } else { + check(ACTION_SOUTH); + check(ACTION_EAST); + + if (range.x >= 0) { + if (right < maxWidth) { + width += range.x; + } else if (range.y >= 0 && bottom >= maxHeight) { + renderable = false; + } + } else { + width += range.x; + } + + if (range.y >= 0) { + if (bottom < maxHeight) { + height += range.y; + } + } else { + height += range.y; + } + } + + if (width < 0 && height < 0) { + action = ACTION_NORTH_WEST; + height = 0; + width = 0; + } else if (width < 0) { + action = ACTION_SOUTH_WEST; + width = 0; + } else if (height < 0) { + action = ACTION_NORTH_EAST; + height = 0; + } + + break; + + // Move canvas + case ACTION_MOVE: + this.move(range.x, range.y); + renderable = false; + break; + + // Zoom canvas + case ACTION_ZOOM: + this.zoom(getMaxZoomRatio(pointers), e); + renderable = false; + break; + + // Create crop box + case ACTION_CROP: + if (!range.x || !range.y) { + renderable = false; + break; + } + + offset = getOffset(this.cropper); + left = pointer.startX - offset.left; + top = pointer.startY - offset.top; + width = cropBoxData.minWidth; + height = cropBoxData.minHeight; + + if (range.x > 0) { + action = range.y > 0 ? ACTION_SOUTH_EAST : ACTION_NORTH_EAST; + } else if (range.x < 0) { + left -= width; + action = range.y > 0 ? 
ACTION_SOUTH_WEST : ACTION_NORTH_WEST; + } + + if (range.y < 0) { + top -= height; + } + + // Show the crop box if is hidden + if (!this.cropped) { + removeClass(this.cropBox, CLASS_HIDDEN); + this.cropped = true; + + if (this.limited) { + this.limitCropBox(true, true); + } + } + + break; + + default: + } + + if (renderable) { + cropBoxData.width = width; + cropBoxData.height = height; + cropBoxData.left = left; + cropBoxData.top = top; + this.action = action; + this.renderCropBox(); + } + + // Override + forEach(pointers, function (p) { + p.startX = p.endX; + p.startY = p.endY; + }); + } +}; + +var methods = { + // Show the crop box manually + crop: function crop() { + if (this.ready && !this.cropped && !this.disabled) { + this.cropped = true; + this.limitCropBox(true, true); + + if (this.options.modal) { + addClass(this.dragBox, CLASS_MODAL); + } + + removeClass(this.cropBox, CLASS_HIDDEN); + this.setCropBoxData(this.initialCropBoxData); + } + + return this; + }, + + + // Reset the image and crop box to their initial states + reset: function reset() { + if (this.ready && !this.disabled) { + this.imageData = assign({}, this.initialImageData); + this.canvasData = assign({}, this.initialCanvasData); + this.cropBoxData = assign({}, this.initialCropBoxData); + this.renderCanvas(); + + if (this.cropped) { + this.renderCropBox(); + } + } + + return this; + }, + + + // Clear the crop box + clear: function clear() { + if (this.cropped && !this.disabled) { + assign(this.cropBoxData, { + left: 0, + top: 0, + width: 0, + height: 0 + }); + + this.cropped = false; + this.renderCropBox(); + this.limitCanvas(true, true); + + // Render canvas after crop box rendered + this.renderCanvas(); + removeClass(this.dragBox, CLASS_MODAL); + addClass(this.cropBox, CLASS_HIDDEN); + } + + return this; + }, + + + /** + * Replace the image's src and rebuild the cropper + * @param {string} url - The new URL. 
+ * @param {boolean} [hasSameSize] - Indicate if the new image has the same size as the old one. + * @returns {Cropper} this + */ + replace: function replace(url) { + var hasSameSize = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false; + + if (!this.disabled && url) { + if (this.isImg) { + this.element.src = url; + } + + if (hasSameSize) { + this.url = url; + this.image.src = url; + + if (this.ready) { + this.viewBoxImage.src = url; + + forEach(this.previews, function (element) { + element.getElementsByTagName('img')[0].src = url; + }); + } + } else { + if (this.isImg) { + this.replaced = true; + } + + this.options.data = null; + this.uncreate(); + this.load(url); + } + } + + return this; + }, + + + // Enable (unfreeze) the cropper + enable: function enable() { + if (this.ready && this.disabled) { + this.disabled = false; + removeClass(this.cropper, CLASS_DISABLED); + } + + return this; + }, + + + // Disable (freeze) the cropper + disable: function disable() { + if (this.ready && !this.disabled) { + this.disabled = true; + addClass(this.cropper, CLASS_DISABLED); + } + + return this; + }, + + + /** + * Destroy the cropper and remove the instance from the image + * @returns {Cropper} this + */ + destroy: function destroy() { + var element = this.element; + + + if (!getData(element, NAMESPACE)) { + return this; + } + + if (this.isImg && this.replaced) { + element.src = this.originalUrl; + } + + this.uncreate(); + removeData(element, NAMESPACE); + + return this; + }, + + + /** + * Move the canvas with relative offsets + * @param {number} offsetX - The relative offset distance on the x-axis. + * @param {number} [offsetY=offsetX] - The relative offset distance on the y-axis. + * @returns {Cropper} this + */ + move: function move(offsetX) { + var offsetY = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : offsetX; + var _canvasData = this.canvasData, + left = _canvasData.left, + top = _canvasData.top; + + + return this.moveTo(isUndefined(offsetX) ? offsetX : left + Number(offsetX), isUndefined(offsetY) ? offsetY : top + Number(offsetY)); + }, + + + /** + * Move the canvas to an absolute point + * @param {number} x - The x-axis coordinate. + * @param {number} [y=x] - The y-axis coordinate. + * @returns {Cropper} this + */ + moveTo: function moveTo(x) { + var y = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : x; + var canvasData = this.canvasData; + + var changed = false; + + x = Number(x); + y = Number(y); + + if (this.ready && !this.disabled && this.options.movable) { + if (isNumber(x)) { + canvasData.left = x; + changed = true; + } + + if (isNumber(y)) { + canvasData.top = y; + changed = true; + } + + if (changed) { + this.renderCanvas(true); + } + } + + return this; + }, + + + /** + * Zoom the canvas with a relative ratio + * @param {number} ratio - The target ratio. + * @param {Event} _originalEvent - The original event if any. + * @returns {Cropper} this + */ + zoom: function zoom(ratio, _originalEvent) { + var canvasData = this.canvasData; + + + ratio = Number(ratio); + + if (ratio < 0) { + ratio = 1 / (1 - ratio); + } else { + ratio = 1 + ratio; + } + + return this.zoomTo(canvasData.width * ratio / canvasData.naturalWidth, null, _originalEvent); + }, + + + /** + * Zoom the canvas to an absolute ratio + * @param {number} ratio - The target ratio. + * @param {Object} pivot - The zoom pivot point coordinate. + * @param {Event} _originalEvent - The original event if any. 
+ * @returns {Cropper} this + */ + zoomTo: function zoomTo(ratio, pivot, _originalEvent) { + var options = this.options, + canvasData = this.canvasData; + var width = canvasData.width, + height = canvasData.height, + naturalWidth = canvasData.naturalWidth, + naturalHeight = canvasData.naturalHeight; + + + ratio = Number(ratio); + + if (ratio >= 0 && this.ready && !this.disabled && options.zoomable) { + var newWidth = naturalWidth * ratio; + var newHeight = naturalHeight * ratio; + + if (dispatchEvent(this.element, EVENT_ZOOM, { + originalEvent: _originalEvent, + oldRatio: width / naturalWidth, + ratio: newWidth / naturalWidth + }) === false) { + return this; + } + + if (_originalEvent) { + var pointers = this.pointers; + + var offset = getOffset(this.cropper); + var center = pointers && Object.keys(pointers).length ? getPointersCenter(pointers) : { + pageX: _originalEvent.pageX, + pageY: _originalEvent.pageY + }; + + // Zoom from the triggering point of the event + canvasData.left -= (newWidth - width) * ((center.pageX - offset.left - canvasData.left) / width); + canvasData.top -= (newHeight - height) * ((center.pageY - offset.top - canvasData.top) / height); + } else if (isPlainObject(pivot) && isNumber(pivot.x) && isNumber(pivot.y)) { + canvasData.left -= (newWidth - width) * ((pivot.x - canvasData.left) / width); + canvasData.top -= (newHeight - height) * ((pivot.y - canvasData.top) / height); + } else { + // Zoom from the center of the canvas + canvasData.left -= (newWidth - width) / 2; + canvasData.top -= (newHeight - height) / 2; + } + + canvasData.width = newWidth; + canvasData.height = newHeight; + this.renderCanvas(true); + } + + return this; + }, + + + /** + * Rotate the canvas with a relative degree + * @param {number} degree - The rotate degree. 
+ * @returns {Cropper} this + */ + rotate: function rotate(degree) { + return this.rotateTo((this.imageData.rotate || 0) + Number(degree)); + }, + + + /** + * Rotate the canvas to an absolute degree + * @param {number} degree - The rotate degree. + * @returns {Cropper} this + */ + rotateTo: function rotateTo(degree) { + degree = Number(degree); + + if (isNumber(degree) && this.ready && !this.disabled && this.options.rotatable) { + this.imageData.rotate = degree % 360; + this.renderCanvas(true, true); + } + + return this; + }, + + + /** + * Scale the image on the x-axis. + * @param {number} scaleX - The scale ratio on the x-axis. + * @returns {Cropper} this + */ + scaleX: function scaleX(_scaleX) { + var scaleY = this.imageData.scaleY; + + + return this.scale(_scaleX, isNumber(scaleY) ? scaleY : 1); + }, + + + /** + * Scale the image on the y-axis. + * @param {number} scaleY - The scale ratio on the y-axis. + * @returns {Cropper} this + */ + scaleY: function scaleY(_scaleY) { + var scaleX = this.imageData.scaleX; + + + return this.scale(isNumber(scaleX) ? scaleX : 1, _scaleY); + }, + + + /** + * Scale the image + * @param {number} scaleX - The scale ratio on the x-axis. + * @param {number} [scaleY=scaleX] - The scale ratio on the y-axis. + * @returns {Cropper} this + */ + scale: function scale(scaleX) { + var scaleY = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : scaleX; + var imageData = this.imageData; + + var transformed = false; + + scaleX = Number(scaleX); + scaleY = Number(scaleY); + + if (this.ready && !this.disabled && this.options.scalable) { + if (isNumber(scaleX)) { + imageData.scaleX = scaleX; + transformed = true; + } + + if (isNumber(scaleY)) { + imageData.scaleY = scaleY; + transformed = true; + } + + if (transformed) { + this.renderCanvas(true, true); + } + } + + return this; + }, + + + /** + * Get the cropped area position and size data (base on the original image) + * @param {boolean} [rounded=false] - Indicate if round the data values or not. + * @returns {Object} The result cropped data. + */ + getData: function getData$$1() { + var rounded = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false; + var options = this.options, + imageData = this.imageData, + canvasData = this.canvasData, + cropBoxData = this.cropBoxData; + + var data = void 0; + + if (this.ready && this.cropped) { + data = { + x: cropBoxData.left - canvasData.left, + y: cropBoxData.top - canvasData.top, + width: cropBoxData.width, + height: cropBoxData.height + }; + + var ratio = imageData.width / imageData.naturalWidth; + + forEach(data, function (n, i) { + n /= ratio; + data[i] = rounded ? Math.round(n) : n; + }); + } else { + data = { + x: 0, + y: 0, + width: 0, + height: 0 + }; + } + + if (options.rotatable) { + data.rotate = imageData.rotate || 0; + } + + if (options.scalable) { + data.scaleX = imageData.scaleX || 1; + data.scaleY = imageData.scaleY || 1; + } + + return data; + }, + + + /** + * Set the cropped area position and size with new data + * @param {Object} data - The new data. 
+ * @returns {Cropper} this + */ + setData: function setData$$1(data) { + var options = this.options, + imageData = this.imageData, + canvasData = this.canvasData; + + var cropBoxData = {}; + + if (this.ready && !this.disabled && isPlainObject(data)) { + var transformed = false; + + if (options.rotatable) { + if (isNumber(data.rotate) && data.rotate !== imageData.rotate) { + imageData.rotate = data.rotate; + transformed = true; + } + } + + if (options.scalable) { + if (isNumber(data.scaleX) && data.scaleX !== imageData.scaleX) { + imageData.scaleX = data.scaleX; + transformed = true; + } + + if (isNumber(data.scaleY) && data.scaleY !== imageData.scaleY) { + imageData.scaleY = data.scaleY; + transformed = true; + } + } + + if (transformed) { + this.renderCanvas(true, true); + } + + var ratio = imageData.width / imageData.naturalWidth; + + if (isNumber(data.x)) { + cropBoxData.left = data.x * ratio + canvasData.left; + } + + if (isNumber(data.y)) { + cropBoxData.top = data.y * ratio + canvasData.top; + } + + if (isNumber(data.width)) { + cropBoxData.width = data.width * ratio; + } + + if (isNumber(data.height)) { + cropBoxData.height = data.height * ratio; + } + + this.setCropBoxData(cropBoxData); + } + + return this; + }, + + + /** + * Get the container size data. + * @returns {Object} The result container data. + */ + getContainerData: function getContainerData() { + return this.ready ? assign({}, this.containerData) : {}; + }, + + + /** + * Get the image position and size data. + * @returns {Object} The result image data. + */ + getImageData: function getImageData() { + return this.sized ? assign({}, this.imageData) : {}; + }, + + + /** + * Get the canvas position and size data. + * @returns {Object} The result canvas data. 
+ */ + getCanvasData: function getCanvasData() { + var canvasData = this.canvasData; + + var data = {}; + + if (this.ready) { + forEach(['left', 'top', 'width', 'height', 'naturalWidth', 'naturalHeight'], function (n) { + data[n] = canvasData[n]; + }); + } + + return data; + }, + + + /** + * Set the canvas position and size with new data. + * @param {Object} data - The new canvas data. + * @returns {Cropper} this + */ + setCanvasData: function setCanvasData(data) { + var canvasData = this.canvasData; + var aspectRatio = canvasData.aspectRatio; + + + if (this.ready && !this.disabled && isPlainObject(data)) { + if (isNumber(data.left)) { + canvasData.left = data.left; + } + + if (isNumber(data.top)) { + canvasData.top = data.top; + } + + if (isNumber(data.width)) { + canvasData.width = data.width; + canvasData.height = data.width / aspectRatio; + } else if (isNumber(data.height)) { + canvasData.height = data.height; + canvasData.width = data.height * aspectRatio; + } + + this.renderCanvas(true); + } + + return this; + }, + + + /** + * Get the crop box position and size data. + * @returns {Object} The result crop box data. + */ + getCropBoxData: function getCropBoxData() { + var cropBoxData = this.cropBoxData; + + var data = void 0; + + if (this.ready && this.cropped) { + data = { + left: cropBoxData.left, + top: cropBoxData.top, + width: cropBoxData.width, + height: cropBoxData.height + }; + } + + return data || {}; + }, + + + /** + * Set the crop box position and size with new data. + * @param {Object} data - The new crop box data. 
+ * @returns {Cropper} this + */ + setCropBoxData: function setCropBoxData(data) { + var cropBoxData = this.cropBoxData; + var aspectRatio = this.options.aspectRatio; + + var widthChanged = void 0; + var heightChanged = void 0; + + if (this.ready && this.cropped && !this.disabled && isPlainObject(data)) { + if (isNumber(data.left)) { + cropBoxData.left = data.left; + } + + if (isNumber(data.top)) { + cropBoxData.top = data.top; + } + + if (isNumber(data.width) && data.width !== cropBoxData.width) { + widthChanged = true; + cropBoxData.width = data.width; + } + + if (isNumber(data.height) && data.height !== cropBoxData.height) { + heightChanged = true; + cropBoxData.height = data.height; + } + + if (aspectRatio) { + if (widthChanged) { + cropBoxData.height = cropBoxData.width / aspectRatio; + } else if (heightChanged) { + cropBoxData.width = cropBoxData.height * aspectRatio; + } + } + + this.renderCropBox(); + } + + return this; + }, + + + /** + * Get a canvas drawn the cropped image. + * @param {Object} [options={}] - The config options. + * @returns {HTMLCanvasElement} - The result canvas. + */ + getCroppedCanvas: function getCroppedCanvas() { + var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; + + if (!this.ready || !window.HTMLCanvasElement) { + return null; + } + + var canvasData = this.canvasData; + + var source = getSourceCanvas(this.image, this.imageData, canvasData, options); + + // Returns the source canvas if it is not cropped. 
+ if (!this.cropped) { + return source; + } + + var _getData = this.getData(), + initialX = _getData.x, + initialY = _getData.y, + initialWidth = _getData.width, + initialHeight = _getData.height; + + var ratio = source.width / Math.floor(canvasData.naturalWidth); + + if (ratio !== 1) { + initialX *= ratio; + initialY *= ratio; + initialWidth *= ratio; + initialHeight *= ratio; + } + + var aspectRatio = initialWidth / initialHeight; + var maxSizes = getAdjustedSizes({ + aspectRatio: aspectRatio, + width: options.maxWidth || Infinity, + height: options.maxHeight || Infinity + }); + var minSizes = getAdjustedSizes({ + aspectRatio: aspectRatio, + width: options.minWidth || 0, + height: options.minHeight || 0 + }, 'cover'); + + var _getAdjustedSizes = getAdjustedSizes({ + aspectRatio: aspectRatio, + width: options.width || (ratio !== 1 ? source.width : initialWidth), + height: options.height || (ratio !== 1 ? source.height : initialHeight) + }), + width = _getAdjustedSizes.width, + height = _getAdjustedSizes.height; + + width = Math.min(maxSizes.width, Math.max(minSizes.width, width)); + height = Math.min(maxSizes.height, Math.max(minSizes.height, height)); + + var canvas = document.createElement('canvas'); + var context = canvas.getContext('2d'); + + canvas.width = normalizeDecimalNumber(width); + canvas.height = normalizeDecimalNumber(height); + + context.fillStyle = options.fillColor || 'transparent'; + context.fillRect(0, 0, width, height); + + var _options$imageSmoothi = options.imageSmoothingEnabled, + imageSmoothingEnabled = _options$imageSmoothi === undefined ? 
true : _options$imageSmoothi, + imageSmoothingQuality = options.imageSmoothingQuality; + + + context.imageSmoothingEnabled = imageSmoothingEnabled; + + if (imageSmoothingQuality) { + context.imageSmoothingQuality = imageSmoothingQuality; + } + + // https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D.drawImage + var sourceWidth = source.width; + var sourceHeight = source.height; + + // Source canvas parameters + var srcX = initialX; + var srcY = initialY; + var srcWidth = void 0; + var srcHeight = void 0; + + // Destination canvas parameters + var dstX = void 0; + var dstY = void 0; + var dstWidth = void 0; + var dstHeight = void 0; + + if (srcX <= -initialWidth || srcX > sourceWidth) { + srcX = 0; + srcWidth = 0; + dstX = 0; + dstWidth = 0; + } else if (srcX <= 0) { + dstX = -srcX; + srcX = 0; + srcWidth = Math.min(sourceWidth, initialWidth + srcX); + dstWidth = srcWidth; + } else if (srcX <= sourceWidth) { + dstX = 0; + srcWidth = Math.min(initialWidth, sourceWidth - srcX); + dstWidth = srcWidth; + } + + if (srcWidth <= 0 || srcY <= -initialHeight || srcY > sourceHeight) { + srcY = 0; + srcHeight = 0; + dstY = 0; + dstHeight = 0; + } else if (srcY <= 0) { + dstY = -srcY; + srcY = 0; + srcHeight = Math.min(sourceHeight, initialHeight + srcY); + dstHeight = srcHeight; + } else if (srcY <= sourceHeight) { + dstY = 0; + srcHeight = Math.min(initialHeight, sourceHeight - srcY); + dstHeight = srcHeight; + } + + var params = [srcX, srcY, srcWidth, srcHeight]; + + // Avoid "IndexSizeError" + if (dstWidth > 0 && dstHeight > 0) { + var scale = width / initialWidth; + + params.push(dstX * scale, dstY * scale, dstWidth * scale, dstHeight * scale); + } + + // All the numerical parameters should be integer for `drawImage` + // https://github.com/fengyuanchen/cropper/issues/476 + context.drawImage.apply(context, [source].concat(toConsumableArray(params.map(function (param) { + return Math.floor(normalizeDecimalNumber(param)); + })))); + + return canvas; + 
}, + + + /** + * Change the aspect ratio of the crop box. + * @param {number} aspectRatio - The new aspect ratio. + * @returns {Cropper} this + */ + setAspectRatio: function setAspectRatio(aspectRatio) { + var options = this.options; + + + if (!this.disabled && !isUndefined(aspectRatio)) { + // 0 -> NaN + options.aspectRatio = Math.max(0, aspectRatio) || NaN; + + if (this.ready) { + this.initCropBox(); + + if (this.cropped) { + this.renderCropBox(); + } + } + } + + return this; + }, + + + /** + * Change the drag mode. + * @param {string} mode - The new drag mode. + * @returns {Cropper} this + */ + setDragMode: function setDragMode(mode) { + var options = this.options, + dragBox = this.dragBox, + face = this.face; + + + if (this.ready && !this.disabled) { + var croppable = mode === DRAG_MODE_CROP; + var movable = options.movable && mode === DRAG_MODE_MOVE; + + mode = croppable || movable ? mode : DRAG_MODE_NONE; + + options.dragMode = mode; + setData(dragBox, DATA_ACTION, mode); + toggleClass(dragBox, CLASS_CROP, croppable); + toggleClass(dragBox, CLASS_MOVE, movable); + + if (!options.cropBoxMovable) { + // Sync drag mode to crop box when it is not movable + setData(face, DATA_ACTION, mode); + toggleClass(face, CLASS_CROP, croppable); + toggleClass(face, CLASS_MOVE, movable); + } + } + + return this; + } +}; + +var AnotherCropper = WINDOW.Cropper; + +var Cropper = function () { + /** + * Create a new Cropper. + * @param {Element} element - The target element for cropping. + * @param {Object} [options={}] - The configuration options. + */ + function Cropper(element) { + var options = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + classCallCheck(this, Cropper); + + if (!element || !REGEXP_TAG_NAME.test(element.tagName)) { + throw new Error('The first argument is required and must be an or element.'); + } + + this.element = element; + this.options = assign({}, DEFAULTS, isPlainObject(options) && options); + this.cropped = false; + this.disabled = false; + this.pointers = {}; + this.ready = false; + this.reloading = false; + this.replaced = false; + this.sized = false; + this.sizing = false; + this.init(); + } + + createClass(Cropper, [{ + key: 'init', + value: function init() { + var element = this.element; + + var tagName = element.tagName.toLowerCase(); + var url = void 0; + + if (getData(element, NAMESPACE)) { + return; + } + + setData(element, NAMESPACE, this); + + if (tagName === 'img') { + this.isImg = true; + + // e.g.: "img/picture.jpg" + url = element.getAttribute('src') || ''; + this.originalUrl = url; + + // Stop when it's a blank image + if (!url) { + return; + } + + // e.g.: "http://example.com/img/picture.jpg" + url = element.src; + } else if (tagName === 'canvas' && window.HTMLCanvasElement) { + url = element.toDataURL(); + } + + this.load(url); + } + }, { + key: 'load', + value: function load(url) { + var _this = this; + + if (!url) { + return; + } + + this.url = url; + this.imageData = {}; + + var element = this.element, + options = this.options; + + + if (!options.checkOrientation || !window.ArrayBuffer) { + this.clone(); + return; + } + + // XMLHttpRequest disallows to open a Data URL in some browsers like IE11 and Safari + if (REGEXP_DATA_URL.test(url)) { + if (REGEXP_DATA_URL_JPEG.test(url)) { + this.read(dataURLToArrayBuffer(url)); + } else { + this.clone(); + } + + return; + } + + var xhr = new XMLHttpRequest(); + + this.reloading = true; + this.xhr = xhr; + + var done = function done() { + _this.reloading = false; + _this.xhr = null; + }; + + xhr.ontimeout = done; + xhr.onabort = done; + xhr.onerror = function () { + done(); + _this.clone(); + }; + 
+ xhr.onload = function () { + done(); + _this.read(xhr.response); + }; + + // Bust cache when there is a "crossOrigin" property + if (options.checkCrossOrigin && isCrossOriginURL(url) && element.crossOrigin) { + url = addTimestamp(url); + } + + xhr.open('get', url); + xhr.responseType = 'arraybuffer'; + xhr.withCredentials = element.crossOrigin === 'use-credentials'; + xhr.send(); + } + }, { + key: 'read', + value: function read(arrayBuffer) { + var options = this.options, + imageData = this.imageData; + + var orientation = getOrientation(arrayBuffer); + var rotate = 0; + var scaleX = 1; + var scaleY = 1; + + if (orientation > 1) { + this.url = arrayBufferToDataURL(arrayBuffer, 'image/jpeg'); + + var _parseOrientation = parseOrientation(orientation); + + rotate = _parseOrientation.rotate; + scaleX = _parseOrientation.scaleX; + scaleY = _parseOrientation.scaleY; + } + + if (options.rotatable) { + imageData.rotate = rotate; + } + + if (options.scalable) { + imageData.scaleX = scaleX; + imageData.scaleY = scaleY; + } + + this.clone(); + } + }, { + key: 'clone', + value: function clone() { + var element = this.element, + url = this.url; + + var crossOrigin = void 0; + var crossOriginUrl = void 0; + + if (this.options.checkCrossOrigin && isCrossOriginURL(url)) { + crossOrigin = element.crossOrigin; + + + if (crossOrigin) { + crossOriginUrl = url; + } else { + crossOrigin = 'anonymous'; + + // Bust cache when there is not a "crossOrigin" property + crossOriginUrl = addTimestamp(url); + } + } + + this.crossOrigin = crossOrigin; + this.crossOriginUrl = crossOriginUrl; + + var image = document.createElement('img'); + + if (crossOrigin) { + image.crossOrigin = crossOrigin; + } + + image.src = crossOriginUrl || url; + + var start = this.start.bind(this); + var stop = this.stop.bind(this); + + this.image = image; + this.onStart = start; + this.onStop = stop; + + if (this.isImg) { + if (element.complete) { + // start asynchronously to keep `this.cropper` is accessible in 
`ready` event handler. + this.timeout = setTimeout(start, 0); + } else { + addListener(element, EVENT_LOAD, start, { + once: true + }); + } + } else { + image.onload = start; + image.onerror = stop; + addClass(image, CLASS_HIDE); + element.parentNode.insertBefore(image, element.nextSibling); + } + } + }, { + key: 'start', + value: function start(event) { + var _this2 = this; + + var image = this.isImg ? this.element : this.image; + + if (event) { + image.onload = null; + image.onerror = null; + } + + this.sizing = true; + + var IS_SAFARI = WINDOW.navigator && /(Macintosh|iPhone|iPod|iPad).*AppleWebKit/i.test(WINDOW.navigator.userAgent); + var done = function done(naturalWidth, naturalHeight) { + assign(_this2.imageData, { + naturalWidth: naturalWidth, + naturalHeight: naturalHeight, + aspectRatio: naturalWidth / naturalHeight + }); + _this2.sizing = false; + _this2.sized = true; + _this2.build(); + }; + + // Modern browsers (except Safari) + if (image.naturalWidth && !IS_SAFARI) { + done(image.naturalWidth, image.naturalHeight); + return; + } + + var sizingImage = document.createElement('img'); + var body = document.body || document.documentElement; + + this.sizingImage = sizingImage; + + sizingImage.onload = function () { + done(sizingImage.width, sizingImage.height); + + if (!IS_SAFARI) { + body.removeChild(sizingImage); + } + }; + + sizingImage.src = image.src; + + // iOS Safari will convert the image automatically + // with its orientation once append it into DOM (#279) + if (!IS_SAFARI) { + sizingImage.style.cssText = 'left:0;' + 'max-height:none!important;' + 'max-width:none!important;' + 'min-height:0!important;' + 'min-width:0!important;' + 'opacity:0;' + 'position:absolute;' + 'top:0;' + 'z-index:-1;'; + body.appendChild(sizingImage); + } + } + }, { + key: 'stop', + value: function stop() { + var image = this.image; + + + image.onload = null; + image.onerror = null; + image.parentNode.removeChild(image); + this.image = null; + } + }, { + key: 'build', + 
value: function build() { + if (!this.sized || this.ready) { + return; + } + + var element = this.element, + options = this.options, + image = this.image; + + // Create cropper elements + + var container = element.parentNode; + var template = document.createElement('div'); + + template.innerHTML = TEMPLATE; + + var cropper = template.querySelector('.' + NAMESPACE + '-container'); + var canvas = cropper.querySelector('.' + NAMESPACE + '-canvas'); + var dragBox = cropper.querySelector('.' + NAMESPACE + '-drag-box'); + var cropBox = cropper.querySelector('.' + NAMESPACE + '-crop-box'); + var face = cropBox.querySelector('.' + NAMESPACE + '-face'); + + this.container = container; + this.cropper = cropper; + this.canvas = canvas; + this.dragBox = dragBox; + this.cropBox = cropBox; + this.viewBox = cropper.querySelector('.' + NAMESPACE + '-view-box'); + this.face = face; + + canvas.appendChild(image); + + // Hide the original image + addClass(element, CLASS_HIDDEN); + + // Inserts the cropper after to the current image + container.insertBefore(cropper, element.nextSibling); + + // Show the image if is hidden + if (!this.isImg) { + removeClass(image, CLASS_HIDE); + } + + this.initPreview(); + this.bind(); + + options.aspectRatio = Math.max(0, options.aspectRatio) || NaN; + options.viewMode = Math.max(0, Math.min(3, Math.round(options.viewMode))) || 0; + + addClass(cropBox, CLASS_HIDDEN); + + if (!options.guides) { + addClass(cropBox.getElementsByClassName(NAMESPACE + '-dashed'), CLASS_HIDDEN); + } + + if (!options.center) { + addClass(cropBox.getElementsByClassName(NAMESPACE + '-center'), CLASS_HIDDEN); + } + + if (options.background) { + addClass(cropper, NAMESPACE + '-bg'); + } + + if (!options.highlight) { + addClass(face, CLASS_INVISIBLE); + } + + if (options.cropBoxMovable) { + addClass(face, CLASS_MOVE); + setData(face, DATA_ACTION, ACTION_ALL); + } + + if (!options.cropBoxResizable) { + addClass(cropBox.getElementsByClassName(NAMESPACE + '-line'), CLASS_HIDDEN); + 
addClass(cropBox.getElementsByClassName(NAMESPACE + '-point'), CLASS_HIDDEN); + } + + this.render(); + this.ready = true; + this.setDragMode(options.dragMode); + + if (options.autoCrop) { + this.crop(); + } + + this.setData(options.data); + + if (isFunction(options.ready)) { + addListener(element, EVENT_READY, options.ready, { + once: true + }); + } + + dispatchEvent(element, EVENT_READY); + } + }, { + key: 'unbuild', + value: function unbuild() { + if (!this.ready) { + return; + } + + this.ready = false; + this.unbind(); + this.resetPreview(); + this.cropper.parentNode.removeChild(this.cropper); + removeClass(this.element, CLASS_HIDDEN); + } + }, { + key: 'uncreate', + value: function uncreate() { + var element = this.element; + + + if (this.ready) { + this.unbuild(); + this.ready = false; + this.cropped = false; + } else if (this.sizing) { + this.sizingImage.onload = null; + this.sizing = false; + this.sized = false; + } else if (this.reloading) { + this.xhr.abort(); + } else if (this.isImg) { + if (element.complete) { + clearTimeout(this.timeout); + } else { + removeListener(element, EVENT_LOAD, this.onStart); + } + } else if (this.image) { + this.stop(); + } + } + + /** + * Get the no conflict cropper class. + * @returns {Cropper} The cropper class. + */ + + }], [{ + key: 'noConflict', + value: function noConflict() { + window.Cropper = AnotherCropper; + return Cropper; + } + + /** + * Change the default options. + * @param {Object} options - The new default options. + */ + + }, { + key: 'setDefaults', + value: function setDefaults(options) { + assign(DEFAULTS, isPlainObject(options) && options); + } + }]); + return Cropper; +}(); + +assign(Cropper.prototype, render, preview, events, handlers, change, methods); + +if ($.fn) { + var AnotherCropper$1 = $.fn.cropper; + var NAMESPACE$1 = 'cropper'; + + $.fn.cropper = function jQueryCropper(option) { + for (var _len = arguments.length, args = Array(_len > 1 ? 
_len - 1 : 0), _key = 1; _key < _len; _key++) { + args[_key - 1] = arguments[_key]; + } + + var result = void 0; + + this.each(function (i, element) { + var $element = $(element); + var isDestroy = option === 'destroy'; + var cropper = $element.data(NAMESPACE$1); + + if (!cropper) { + if (isDestroy) { + return; + } + + var options = $.extend({}, $element.data(), $.isPlainObject(option) && option); + + cropper = new Cropper(element, options); + $element.data(NAMESPACE$1, cropper); + } + + if (typeof option === 'string') { + var fn = cropper[option]; + + if ($.isFunction(fn)) { + result = fn.apply(cropper, args); + + if (result === cropper) { + result = undefined; + } + + if (isDestroy) { + $element.removeData(NAMESPACE$1); + } + } + } + }); + + return result !== undefined ? result : this; + }; + + $.fn.cropper.Constructor = Cropper; + $.fn.cropper.setDefaults = Cropper.setDefaults; + $.fn.cropper.noConflict = function noConflict() { + $.fn.cropper = AnotherCropper$1; + return this; + }; +} diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/cropper/cropper.js b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/cropper/cropper.js new file mode 100644 index 0000000000000000000000000000000000000000..36881f741fab3b7063f5e8bef9288396bf737152 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/cropper/cropper.js @@ -0,0 +1,3761 @@ +/*! + * Cropper v4.0.0 + * https://github.com/fengyuanchen/cropper + * + * Copyright (c) 2014-2018 Chen Fengyuan + * Released under the MIT license + * + * Date: 2018-04-01T06:27:27.267Z + */ + +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? 
factory(require('jquery')) : + typeof define === 'function' && define.amd ? define(['jquery'], factory) : + (factory(global.jQuery)); +}(this, (function ($) { 'use strict'; + + $ = $ && $.hasOwnProperty('default') ? $['default'] : $; + + var IN_BROWSER = typeof window !== 'undefined'; + var WINDOW = IN_BROWSER ? window : {}; + var NAMESPACE = 'cropper'; + + // Actions + var ACTION_ALL = 'all'; + var ACTION_CROP = 'crop'; + var ACTION_MOVE = 'move'; + var ACTION_ZOOM = 'zoom'; + var ACTION_EAST = 'e'; + var ACTION_WEST = 'w'; + var ACTION_SOUTH = 's'; + var ACTION_NORTH = 'n'; + var ACTION_NORTH_EAST = 'ne'; + var ACTION_NORTH_WEST = 'nw'; + var ACTION_SOUTH_EAST = 'se'; + var ACTION_SOUTH_WEST = 'sw'; + + // Classes + var CLASS_CROP = NAMESPACE + '-crop'; + var CLASS_DISABLED = NAMESPACE + '-disabled'; + var CLASS_HIDDEN = NAMESPACE + '-hidden'; + var CLASS_HIDE = NAMESPACE + '-hide'; + var CLASS_INVISIBLE = NAMESPACE + '-invisible'; + var CLASS_MODAL = NAMESPACE + '-modal'; + var CLASS_MOVE = NAMESPACE + '-move'; + + // Data keys + var DATA_ACTION = 'action'; + var DATA_PREVIEW = 'preview'; + + // Drag modes + var DRAG_MODE_CROP = 'crop'; + var DRAG_MODE_MOVE = 'move'; + var DRAG_MODE_NONE = 'none'; + + // Events + var EVENT_CROP = 'crop'; + var EVENT_CROP_END = 'cropend'; + var EVENT_CROP_MOVE = 'cropmove'; + var EVENT_CROP_START = 'cropstart'; + var EVENT_DBLCLICK = 'dblclick'; + var EVENT_LOAD = 'load'; + var EVENT_POINTER_DOWN = WINDOW.PointerEvent ? 'pointerdown' : 'touchstart mousedown'; + var EVENT_POINTER_MOVE = WINDOW.PointerEvent ? 'pointermove' : 'touchmove mousemove'; + var EVENT_POINTER_UP = WINDOW.PointerEvent ? 
'pointerup pointercancel' : 'touchend touchcancel mouseup'; + var EVENT_READY = 'ready'; + var EVENT_RESIZE = 'resize'; + var EVENT_WHEEL = 'wheel mousewheel DOMMouseScroll'; + var EVENT_ZOOM = 'zoom'; + + // RegExps + var REGEXP_ACTIONS = /^(?:e|w|s|n|se|sw|ne|nw|all|crop|move|zoom)$/; + var REGEXP_DATA_URL = /^data:/; + var REGEXP_DATA_URL_JPEG = /^data:image\/jpeg;base64,/; + var REGEXP_TAG_NAME = /^(?:img|canvas)$/i; + + var DEFAULTS = { + // Define the view mode of the cropper + viewMode: 0, // 0, 1, 2, 3 + + // Define the dragging mode of the cropper + dragMode: DRAG_MODE_CROP, // 'crop', 'move' or 'none' + + // Define the aspect ratio of the crop box + aspectRatio: NaN, + + // An object with the previous cropping result data + data: null, + + // A selector for adding extra containers to preview + preview: '', + + // Re-render the cropper when resize the window + responsive: true, + + // Restore the cropped area after resize the window + restore: true, + + // Check if the current image is a cross-origin image + checkCrossOrigin: true, + + // Check the current image's Exif Orientation information + checkOrientation: true, + + // Show the black modal + modal: true, + + // Show the dashed lines for guiding + guides: true, + + // Show the center indicator for guiding + center: true, + + // Show the white modal to highlight the crop box + highlight: true, + + // Show the grid background + background: true, + + // Enable to crop the image automatically when initialize + autoCrop: true, + + // Define the percentage of automatic cropping area when initializes + autoCropArea: 0.8, + + // Enable to move the image + movable: true, + + // Enable to rotate the image + rotatable: true, + + // Enable to scale the image + scalable: true, + + // Enable to zoom the image + zoomable: true, + + // Enable to zoom the image by dragging touch + zoomOnTouch: true, + + // Enable to zoom the image by wheeling mouse + zoomOnWheel: true, + + // Define zoom ratio when zoom the image by 
wheeling mouse + wheelZoomRatio: 0.1, + + // Enable to move the crop box + cropBoxMovable: true, + + // Enable to resize the crop box + cropBoxResizable: true, + + // Toggle drag mode between "crop" and "move" when click twice on the cropper + toggleDragModeOnDblclick: true, + + // Size limitation + minCanvasWidth: 0, + minCanvasHeight: 0, + minCropBoxWidth: 0, + minCropBoxHeight: 0, + minContainerWidth: 200, + minContainerHeight: 100, + + // Shortcuts of events + ready: null, + cropstart: null, + cropmove: null, + cropend: null, + crop: null, + zoom: null + }; + + var TEMPLATE = '
' + '
' + '
' + '
' + '
' + '
' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '
' + '
'; + + var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { + return typeof obj; + } : function (obj) { + return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; + }; + + var classCallCheck = function (instance, Constructor) { + if (!(instance instanceof Constructor)) { + throw new TypeError("Cannot call a class as a function"); + } + }; + + var createClass = function () { + function defineProperties(target, props) { + for (var i = 0; i < props.length; i++) { + var descriptor = props[i]; + descriptor.enumerable = descriptor.enumerable || false; + descriptor.configurable = true; + if ("value" in descriptor) descriptor.writable = true; + Object.defineProperty(target, descriptor.key, descriptor); + } + } + + return function (Constructor, protoProps, staticProps) { + if (protoProps) defineProperties(Constructor.prototype, protoProps); + if (staticProps) defineProperties(Constructor, staticProps); + return Constructor; + }; + }(); + + var toConsumableArray = function (arr) { + if (Array.isArray(arr)) { + for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) arr2[i] = arr[i]; + + return arr2; + } else { + return Array.from(arr); + } + }; + + /** + * Check if the given value is not a number. + */ + var isNaN = Number.isNaN || WINDOW.isNaN; + + /** + * Check if the given value is a number. + * @param {*} value - The value to check. + * @returns {boolean} Returns `true` if the given value is a number, else `false`. + */ + function isNumber(value) { + return typeof value === 'number' && !isNaN(value); + } + + /** + * Check if the given value is undefined. + * @param {*} value - The value to check. + * @returns {boolean} Returns `true` if the given value is undefined, else `false`. + */ + function isUndefined(value) { + return typeof value === 'undefined'; + } + + /** + * Check if the given value is an object. 
+ * @param {*} value - The value to check. + * @returns {boolean} Returns `true` if the given value is an object, else `false`. + */ + function isObject(value) { + return (typeof value === 'undefined' ? 'undefined' : _typeof(value)) === 'object' && value !== null; + } + + var hasOwnProperty = Object.prototype.hasOwnProperty; + + /** + * Check if the given value is a plain object. + * @param {*} value - The value to check. + * @returns {boolean} Returns `true` if the given value is a plain object, else `false`. + */ + + function isPlainObject(value) { + if (!isObject(value)) { + return false; + } + + try { + var _constructor = value.constructor; + var prototype = _constructor.prototype; + + + return _constructor && prototype && hasOwnProperty.call(prototype, 'isPrototypeOf'); + } catch (e) { + return false; + } + } + + /** + * Check if the given value is a function. + * @param {*} value - The value to check. + * @returns {boolean} Returns `true` if the given value is a function, else `false`. + */ + function isFunction(value) { + return typeof value === 'function'; + } + + /** + * Iterate the given data. + * @param {*} data - The data to iterate. + * @param {Function} callback - The process function for each element. + * @returns {*} The original data. + */ + function forEach(data, callback) { + if (data && isFunction(callback)) { + if (Array.isArray(data) || isNumber(data.length) /* array-like */) { + var length = data.length; + + var i = void 0; + + for (i = 0; i < length; i += 1) { + if (callback.call(data, data[i], i, data) === false) { + break; + } + } + } else if (isObject(data)) { + Object.keys(data).forEach(function (key) { + callback.call(data, data[key], key, data); + }); + } + } + + return data; + } + + /** + * Extend the given object. + * @param {*} obj - The object to be extended. + * @param {*} args - The rest objects which will be merged to the first object. + * @returns {Object} The extended object. 
+ */ + var assign = Object.assign || function assign(obj) { + for (var _len = arguments.length, args = Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) { + args[_key - 1] = arguments[_key]; + } + + if (isObject(obj) && args.length > 0) { + args.forEach(function (arg) { + if (isObject(arg)) { + Object.keys(arg).forEach(function (key) { + obj[key] = arg[key]; + }); + } + }); + } + + return obj; + }; + + var REGEXP_DECIMALS = /\.\d*(?:0|9){12}\d*$/i; + + /** + * Normalize decimal number. + * Check out {@link http://0.30000000000000004.com/} + * @param {number} value - The value to normalize. + * @param {number} [times=100000000000] - The times for normalizing. + * @returns {number} Returns the normalized number. + */ + function normalizeDecimalNumber(value) { + var times = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 100000000000; + + return REGEXP_DECIMALS.test(value) ? Math.round(value * times) / times : value; + } + + var REGEXP_SUFFIX = /^(?:width|height|left|top|marginLeft|marginTop)$/; + + /** + * Apply styles to the given element. + * @param {Element} element - The target element. + * @param {Object} styles - The styles for applying. + */ + function setStyle(element, styles) { + var style = element.style; + + + forEach(styles, function (value, property) { + if (REGEXP_SUFFIX.test(property) && isNumber(value)) { + value += 'px'; + } + + style[property] = value; + }); + } + + /** + * Check if the given element has a special class. + * @param {Element} element - The element to check. + * @param {string} value - The class to search. + * @returns {boolean} Returns `true` if the special class was found. + */ + function hasClass(element, value) { + return element.classList ? element.classList.contains(value) : element.className.indexOf(value) > -1; + } + + /** + * Add classes to the given element. + * @param {Element} element - The target element. + * @param {string} value - The classes to be added. 
+ */ + function addClass(element, value) { + if (!value) { + return; + } + + if (isNumber(element.length)) { + forEach(element, function (elem) { + addClass(elem, value); + }); + return; + } + + if (element.classList) { + element.classList.add(value); + return; + } + + var className = element.className.trim(); + + if (!className) { + element.className = value; + } else if (className.indexOf(value) < 0) { + element.className = className + ' ' + value; + } + } + + /** + * Remove classes from the given element. + * @param {Element} element - The target element. + * @param {string} value - The classes to be removed. + */ + function removeClass(element, value) { + if (!value) { + return; + } + + if (isNumber(element.length)) { + forEach(element, function (elem) { + removeClass(elem, value); + }); + return; + } + + if (element.classList) { + element.classList.remove(value); + return; + } + + if (element.className.indexOf(value) >= 0) { + element.className = element.className.replace(value, ''); + } + } + + /** + * Add or remove classes from the given element. + * @param {Element} element - The target element. + * @param {string} value - The classes to be toggled. + * @param {boolean} added - Add only. + */ + function toggleClass(element, value, added) { + if (!value) { + return; + } + + if (isNumber(element.length)) { + forEach(element, function (elem) { + toggleClass(elem, value, added); + }); + return; + } + + // IE10-11 doesn't support the second parameter of `classList.toggle` + if (added) { + addClass(element, value); + } else { + removeClass(element, value); + } + } + + var REGEXP_HYPHENATE = /([a-z\d])([A-Z])/g; + + /** + * Transform the given string from camelCase to kebab-case + * @param {string} value - The value to transform. + * @returns {string} The transformed value. + */ + function hyphenate(value) { + return value.replace(REGEXP_HYPHENATE, '$1-$2').toLowerCase(); + } + + /** + * Get data from the given element. 
+ * @param {Element} element - The target element. + * @param {string} name - The data key to get. + * @returns {string} The data value. + */ + function getData(element, name) { + if (isObject(element[name])) { + return element[name]; + } else if (element.dataset) { + return element.dataset[name]; + } + + return element.getAttribute('data-' + hyphenate(name)); + } + + /** + * Set data to the given element. + * @param {Element} element - The target element. + * @param {string} name - The data key to set. + * @param {string} data - The data value. + */ + function setData(element, name, data) { + if (isObject(data)) { + element[name] = data; + } else if (element.dataset) { + element.dataset[name] = data; + } else { + element.setAttribute('data-' + hyphenate(name), data); + } + } + + /** + * Remove data from the given element. + * @param {Element} element - The target element. + * @param {string} name - The data key to remove. + */ + function removeData(element, name) { + if (isObject(element[name])) { + try { + delete element[name]; + } catch (e) { + element[name] = undefined; + } + } else if (element.dataset) { + // #128 Safari not allows to delete dataset property + try { + delete element.dataset[name]; + } catch (e) { + element.dataset[name] = undefined; + } + } else { + element.removeAttribute('data-' + hyphenate(name)); + } + } + + var REGEXP_SPACES = /\s\s*/; + var onceSupported = function () { + var supported = false; + + if (IN_BROWSER) { + var once = false; + var listener = function listener() {}; + var options = Object.defineProperty({}, 'once', { + get: function get$$1() { + supported = true; + return once; + }, + + + /** + * This setter can fix a `TypeError` in strict mode + * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Getter_only} + * @param {boolean} value - The value to set + */ + set: function set$$1(value) { + once = value; + } + }); + + WINDOW.addEventListener('test', listener, options); + 
WINDOW.removeEventListener('test', listener, options); + } + + return supported; + }(); + + /** + * Remove event listener from the target element. + * @param {Element} element - The event target. + * @param {string} type - The event type(s). + * @param {Function} listener - The event listener. + * @param {Object} options - The event options. + */ + function removeListener(element, type, listener) { + var options = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {}; + + var handler = listener; + + type.trim().split(REGEXP_SPACES).forEach(function (event) { + if (!onceSupported) { + var listeners = element.listeners; + + + if (listeners && listeners[event] && listeners[event][listener]) { + handler = listeners[event][listener]; + delete listeners[event][listener]; + + if (Object.keys(listeners[event]).length === 0) { + delete listeners[event]; + } + + if (Object.keys(listeners).length === 0) { + delete element.listeners; + } + } + } + + element.removeEventListener(event, handler, options); + }); + } + + /** + * Add event listener to the target element. + * @param {Element} element - The event target. + * @param {string} type - The event type(s). + * @param {Function} listener - The event listener. + * @param {Object} options - The event options. + */ + function addListener(element, type, listener) { + var options = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {}; + + var _handler = listener; + + type.trim().split(REGEXP_SPACES).forEach(function (event) { + if (options.once && !onceSupported) { + var _element$listeners = element.listeners, + listeners = _element$listeners === undefined ? 
{} : _element$listeners; + + + _handler = function handler() { + for (var _len2 = arguments.length, args = Array(_len2), _key2 = 0; _key2 < _len2; _key2++) { + args[_key2] = arguments[_key2]; + } + + delete listeners[event][listener]; + element.removeEventListener(event, _handler, options); + listener.apply(element, args); + }; + + if (!listeners[event]) { + listeners[event] = {}; + } + + if (listeners[event][listener]) { + element.removeEventListener(event, listeners[event][listener], options); + } + + listeners[event][listener] = _handler; + element.listeners = listeners; + } + + element.addEventListener(event, _handler, options); + }); + } + + /** + * Dispatch event on the target element. + * @param {Element} element - The event target. + * @param {string} type - The event type(s). + * @param {Object} data - The additional event data. + * @returns {boolean} Indicate if the event is default prevented or not. + */ + function dispatchEvent(element, type, data) { + var event = void 0; + + // Event and CustomEvent on IE9-11 are global objects, not constructors + if (isFunction(Event) && isFunction(CustomEvent)) { + event = new CustomEvent(type, { + detail: data, + bubbles: true, + cancelable: true + }); + } else { + event = document.createEvent('CustomEvent'); + event.initCustomEvent(type, true, true, data); + } + + return element.dispatchEvent(event); + } + + /** + * Get the offset base on the document. + * @param {Element} element - The target element. + * @returns {Object} The offset data. + */ + function getOffset(element) { + var box = element.getBoundingClientRect(); + + return { + left: box.left + (window.pageXOffset - document.documentElement.clientLeft), + top: box.top + (window.pageYOffset - document.documentElement.clientTop) + }; + } + + var location = WINDOW.location; + + var REGEXP_ORIGINS = /^(https?:)\/\/([^:/?#]+):?(\d*)/i; + + /** + * Check if the given URL is a cross origin URL. + * @param {string} url - The target URL. 
+ * @returns {boolean} Returns `true` if the given URL is a cross origin URL, else `false`. + */ + function isCrossOriginURL(url) { + var parts = url.match(REGEXP_ORIGINS); + + return parts && (parts[1] !== location.protocol || parts[2] !== location.hostname || parts[3] !== location.port); + } + + /** + * Add timestamp to the given URL. + * @param {string} url - The target URL. + * @returns {string} The result URL. + */ + function addTimestamp(url) { + var timestamp = 'timestamp=' + new Date().getTime(); + + return url + (url.indexOf('?') === -1 ? '?' : '&') + timestamp; + } + + /** + * Get transforms base on the given object. + * @param {Object} obj - The target object. + * @returns {string} A string contains transform values. + */ + function getTransforms(_ref) { + var rotate = _ref.rotate, + scaleX = _ref.scaleX, + scaleY = _ref.scaleY, + translateX = _ref.translateX, + translateY = _ref.translateY; + + var values = []; + + if (isNumber(translateX) && translateX !== 0) { + values.push('translateX(' + translateX + 'px)'); + } + + if (isNumber(translateY) && translateY !== 0) { + values.push('translateY(' + translateY + 'px)'); + } + + // Rotate should come first before scale to match orientation transform + if (isNumber(rotate) && rotate !== 0) { + values.push('rotate(' + rotate + 'deg)'); + } + + if (isNumber(scaleX) && scaleX !== 1) { + values.push('scaleX(' + scaleX + ')'); + } + + if (isNumber(scaleY) && scaleY !== 1) { + values.push('scaleY(' + scaleY + ')'); + } + + var transform = values.length ? values.join(' ') : 'none'; + + return { + WebkitTransform: transform, + msTransform: transform, + transform: transform + }; + } + + /** + * Get the max ratio of a group of pointers. + * @param {string} pointers - The target pointers. + * @returns {number} The result ratio. 
+ */ + function getMaxZoomRatio(pointers) { + var pointers2 = assign({}, pointers); + var ratios = []; + + forEach(pointers, function (pointer, pointerId) { + delete pointers2[pointerId]; + + forEach(pointers2, function (pointer2) { + var x1 = Math.abs(pointer.startX - pointer2.startX); + var y1 = Math.abs(pointer.startY - pointer2.startY); + var x2 = Math.abs(pointer.endX - pointer2.endX); + var y2 = Math.abs(pointer.endY - pointer2.endY); + var z1 = Math.sqrt(x1 * x1 + y1 * y1); + var z2 = Math.sqrt(x2 * x2 + y2 * y2); + var ratio = (z2 - z1) / z1; + + ratios.push(ratio); + }); + }); + + ratios.sort(function (a, b) { + return Math.abs(a) < Math.abs(b); + }); + + return ratios[0]; + } + + /** + * Get a pointer from an event object. + * @param {Object} event - The target event object. + * @param {boolean} endOnly - Indicates if only returns the end point coordinate or not. + * @returns {Object} The result pointer contains start and/or end point coordinates. + */ + function getPointer(_ref2, endOnly) { + var pageX = _ref2.pageX, + pageY = _ref2.pageY; + + var end = { + endX: pageX, + endY: pageY + }; + + return endOnly ? end : assign({ + startX: pageX, + startY: pageY + }, end); + } + + /** + * Get the center point coordinate of a group of pointers. + * @param {Object} pointers - The target pointers. + * @returns {Object} The center point coordinate. + */ + function getPointersCenter(pointers) { + var pageX = 0; + var pageY = 0; + var count = 0; + + forEach(pointers, function (_ref3) { + var startX = _ref3.startX, + startY = _ref3.startY; + + pageX += startX; + pageY += startY; + count += 1; + }); + + pageX /= count; + pageY /= count; + + return { + pageX: pageX, + pageY: pageY + }; + } + + /** + * Check if the given value is a finite number. + */ + var isFinite = Number.isFinite || WINDOW.isFinite; + + /** + * Get the max sizes in a rectangle under the given aspect ratio. + * @param {Object} data - The original sizes. 
+ * @param {string} [type='contain'] - The adjust type. + * @returns {Object} The result sizes. + */ + function getAdjustedSizes(_ref4) // or 'cover' + { + var aspectRatio = _ref4.aspectRatio, + height = _ref4.height, + width = _ref4.width; + var type = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'contain'; + + var isValidNumber = function isValidNumber(value) { + return isFinite(value) && value > 0; + }; + + if (isValidNumber(width) && isValidNumber(height)) { + var adjustedWidth = height * aspectRatio; + + if (type === 'contain' && adjustedWidth > width || type === 'cover' && adjustedWidth < width) { + height = width / aspectRatio; + } else { + width = height * aspectRatio; + } + } else if (isValidNumber(width)) { + height = width / aspectRatio; + } else if (isValidNumber(height)) { + width = height * aspectRatio; + } + + return { + width: width, + height: height + }; + } + + /** + * Get the new sizes of a rectangle after rotated. + * @param {Object} data - The original sizes. + * @returns {Object} The result sizes. + */ + function getRotatedSizes(_ref5) { + var width = _ref5.width, + height = _ref5.height, + degree = _ref5.degree; + + degree = Math.abs(degree) % 180; + + if (degree === 90) { + return { + width: height, + height: width + }; + } + + var arc = degree % 90 * Math.PI / 180; + var sinArc = Math.sin(arc); + var cosArc = Math.cos(arc); + var newWidth = width * cosArc + height * sinArc; + var newHeight = width * sinArc + height * cosArc; + + return degree > 90 ? { + width: newHeight, + height: newWidth + } : { + width: newWidth, + height: newHeight + }; + } + + /** + * Get a canvas which drew the given image. + * @param {HTMLImageElement} image - The image for drawing. + * @param {Object} imageData - The image data. + * @param {Object} canvasData - The canvas data. + * @param {Object} options - The options. + * @returns {HTMLCanvasElement} The result canvas. 
+ */ + function getSourceCanvas(image, _ref6, _ref7, _ref8) { + var imageAspectRatio = _ref6.aspectRatio, + imageNaturalWidth = _ref6.naturalWidth, + imageNaturalHeight = _ref6.naturalHeight, + _ref6$rotate = _ref6.rotate, + rotate = _ref6$rotate === undefined ? 0 : _ref6$rotate, + _ref6$scaleX = _ref6.scaleX, + scaleX = _ref6$scaleX === undefined ? 1 : _ref6$scaleX, + _ref6$scaleY = _ref6.scaleY, + scaleY = _ref6$scaleY === undefined ? 1 : _ref6$scaleY; + var aspectRatio = _ref7.aspectRatio, + naturalWidth = _ref7.naturalWidth, + naturalHeight = _ref7.naturalHeight; + var _ref8$fillColor = _ref8.fillColor, + fillColor = _ref8$fillColor === undefined ? 'transparent' : _ref8$fillColor, + _ref8$imageSmoothingE = _ref8.imageSmoothingEnabled, + imageSmoothingEnabled = _ref8$imageSmoothingE === undefined ? true : _ref8$imageSmoothingE, + _ref8$imageSmoothingQ = _ref8.imageSmoothingQuality, + imageSmoothingQuality = _ref8$imageSmoothingQ === undefined ? 'low' : _ref8$imageSmoothingQ, + _ref8$maxWidth = _ref8.maxWidth, + maxWidth = _ref8$maxWidth === undefined ? Infinity : _ref8$maxWidth, + _ref8$maxHeight = _ref8.maxHeight, + maxHeight = _ref8$maxHeight === undefined ? Infinity : _ref8$maxHeight, + _ref8$minWidth = _ref8.minWidth, + minWidth = _ref8$minWidth === undefined ? 0 : _ref8$minWidth, + _ref8$minHeight = _ref8.minHeight, + minHeight = _ref8$minHeight === undefined ? 
0 : _ref8$minHeight; + + var canvas = document.createElement('canvas'); + var context = canvas.getContext('2d'); + var maxSizes = getAdjustedSizes({ + aspectRatio: aspectRatio, + width: maxWidth, + height: maxHeight + }); + var minSizes = getAdjustedSizes({ + aspectRatio: aspectRatio, + width: minWidth, + height: minHeight + }, 'cover'); + var width = Math.min(maxSizes.width, Math.max(minSizes.width, naturalWidth)); + var height = Math.min(maxSizes.height, Math.max(minSizes.height, naturalHeight)); + + // Note: should always use image's natural sizes for drawing as + // imageData.naturalWidth === canvasData.naturalHeight when rotate % 180 === 90 + var destMaxSizes = getAdjustedSizes({ + aspectRatio: imageAspectRatio, + width: maxWidth, + height: maxHeight + }); + var destMinSizes = getAdjustedSizes({ + aspectRatio: imageAspectRatio, + width: minWidth, + height: minHeight + }, 'cover'); + var destWidth = Math.min(destMaxSizes.width, Math.max(destMinSizes.width, imageNaturalWidth)); + var destHeight = Math.min(destMaxSizes.height, Math.max(destMinSizes.height, imageNaturalHeight)); + var params = [-destWidth / 2, -destHeight / 2, destWidth, destHeight]; + + canvas.width = normalizeDecimalNumber(width); + canvas.height = normalizeDecimalNumber(height); + context.fillStyle = fillColor; + context.fillRect(0, 0, width, height); + context.save(); + context.translate(width / 2, height / 2); + context.rotate(rotate * Math.PI / 180); + context.scale(scaleX, scaleY); + context.imageSmoothingEnabled = imageSmoothingEnabled; + context.imageSmoothingQuality = imageSmoothingQuality; + context.drawImage.apply(context, [image].concat(toConsumableArray(params.map(function (param) { + return Math.floor(normalizeDecimalNumber(param)); + })))); + context.restore(); + return canvas; + } + + var fromCharCode = String.fromCharCode; + + /** + * Get string from char code in data view. + * @param {DataView} dataView - The data view for read. + * @param {number} start - The start index. 
+ * @param {number} length - The read length. + * @returns {string} The read result. + */ + + function getStringFromCharCode(dataView, start, length) { + var str = ''; + var i = void 0; + + length += start; + + for (i = start; i < length; i += 1) { + str += fromCharCode(dataView.getUint8(i)); + } + + return str; + } + + var REGEXP_DATA_URL_HEAD = /^data:.*,/; + + /** + * Transform Data URL to array buffer. + * @param {string} dataURL - The Data URL to transform. + * @returns {ArrayBuffer} The result array buffer. + */ + function dataURLToArrayBuffer(dataURL) { + var base64 = dataURL.replace(REGEXP_DATA_URL_HEAD, ''); + var binary = atob(base64); + var arrayBuffer = new ArrayBuffer(binary.length); + var uint8 = new Uint8Array(arrayBuffer); + + forEach(uint8, function (value, i) { + uint8[i] = binary.charCodeAt(i); + }); + + return arrayBuffer; + } + + /** + * Transform array buffer to Data URL. + * @param {ArrayBuffer} arrayBuffer - The array buffer to transform. + * @param {string} mimeType - The mime type of the Data URL. + * @returns {string} The result Data URL. + */ + function arrayBufferToDataURL(arrayBuffer, mimeType) { + var uint8 = new Uint8Array(arrayBuffer); + var data = ''; + + // TypedArray.prototype.forEach is not supported in some browsers. + forEach(uint8, function (value) { + data += fromCharCode(value); + }); + + return 'data:' + mimeType + ';base64,' + btoa(data); + } + + /** + * Get orientation value from given array buffer. + * @param {ArrayBuffer} arrayBuffer - The array buffer to read. + * @returns {number} The read orientation value. 
+ */ + function getOrientation(arrayBuffer) { + var dataView = new DataView(arrayBuffer); + var orientation = void 0; + var littleEndian = void 0; + var app1Start = void 0; + var ifdStart = void 0; + + // Only handle JPEG image (start by 0xFFD8) + if (dataView.getUint8(0) === 0xFF && dataView.getUint8(1) === 0xD8) { + var length = dataView.byteLength; + var offset = 2; + + while (offset < length) { + if (dataView.getUint8(offset) === 0xFF && dataView.getUint8(offset + 1) === 0xE1) { + app1Start = offset; + break; + } + + offset += 1; + } + } + + if (app1Start) { + var exifIDCode = app1Start + 4; + var tiffOffset = app1Start + 10; + + if (getStringFromCharCode(dataView, exifIDCode, 4) === 'Exif') { + var endianness = dataView.getUint16(tiffOffset); + + littleEndian = endianness === 0x4949; + + if (littleEndian || endianness === 0x4D4D /* bigEndian */) { + if (dataView.getUint16(tiffOffset + 2, littleEndian) === 0x002A) { + var firstIFDOffset = dataView.getUint32(tiffOffset + 4, littleEndian); + + if (firstIFDOffset >= 0x00000008) { + ifdStart = tiffOffset + firstIFDOffset; + } + } + } + } + } + + if (ifdStart) { + var _length = dataView.getUint16(ifdStart, littleEndian); + var _offset = void 0; + var i = void 0; + + for (i = 0; i < _length; i += 1) { + _offset = ifdStart + i * 12 + 2; + + if (dataView.getUint16(_offset, littleEndian) === 0x0112 /* Orientation */) { + // 8 is the offset of the current tag's value + _offset += 8; + + // Get the original orientation value + orientation = dataView.getUint16(_offset, littleEndian); + + // Override the orientation with its default value + dataView.setUint16(_offset, 1, littleEndian); + break; + } + } + } + + return orientation; + } + + /** + * Parse Exif Orientation value. + * @param {number} orientation - The orientation to parse. + * @returns {Object} The parsed result. 
+ */ + function parseOrientation(orientation) { + var rotate = 0; + var scaleX = 1; + var scaleY = 1; + + switch (orientation) { + // Flip horizontal + case 2: + scaleX = -1; + break; + + // Rotate left 180° + case 3: + rotate = -180; + break; + + // Flip vertical + case 4: + scaleY = -1; + break; + + // Flip vertical and rotate right 90° + case 5: + rotate = 90; + scaleY = -1; + break; + + // Rotate right 90° + case 6: + rotate = 90; + break; + + // Flip horizontal and rotate right 90° + case 7: + rotate = 90; + scaleX = -1; + break; + + // Rotate left 90° + case 8: + rotate = -90; + break; + + default: + } + + return { + rotate: rotate, + scaleX: scaleX, + scaleY: scaleY + }; + } + + var render = { + render: function render() { + this.initContainer(); + this.initCanvas(); + this.initCropBox(); + this.renderCanvas(); + + if (this.cropped) { + this.renderCropBox(); + } + }, + initContainer: function initContainer() { + var element = this.element, + options = this.options, + container = this.container, + cropper = this.cropper; + + + addClass(cropper, CLASS_HIDDEN); + removeClass(element, CLASS_HIDDEN); + + var containerData = { + width: Math.max(container.offsetWidth, Number(options.minContainerWidth) || 200), + height: Math.max(container.offsetHeight, Number(options.minContainerHeight) || 100) + }; + + this.containerData = containerData; + + setStyle(cropper, { + width: containerData.width, + height: containerData.height + }); + + addClass(element, CLASS_HIDDEN); + removeClass(cropper, CLASS_HIDDEN); + }, + + + // Canvas (image wrapper) + initCanvas: function initCanvas() { + var containerData = this.containerData, + imageData = this.imageData; + var viewMode = this.options.viewMode; + + var rotated = Math.abs(imageData.rotate) % 180 === 90; + var naturalWidth = rotated ? imageData.naturalHeight : imageData.naturalWidth; + var naturalHeight = rotated ? 
imageData.naturalWidth : imageData.naturalHeight; + var aspectRatio = naturalWidth / naturalHeight; + var canvasWidth = containerData.width; + var canvasHeight = containerData.height; + + if (containerData.height * aspectRatio > containerData.width) { + if (viewMode === 3) { + canvasWidth = containerData.height * aspectRatio; + } else { + canvasHeight = containerData.width / aspectRatio; + } + } else if (viewMode === 3) { + canvasHeight = containerData.width / aspectRatio; + } else { + canvasWidth = containerData.height * aspectRatio; + } + + var canvasData = { + aspectRatio: aspectRatio, + naturalWidth: naturalWidth, + naturalHeight: naturalHeight, + width: canvasWidth, + height: canvasHeight + }; + + canvasData.left = (containerData.width - canvasWidth) / 2; + canvasData.top = (containerData.height - canvasHeight) / 2; + canvasData.oldLeft = canvasData.left; + canvasData.oldTop = canvasData.top; + + this.canvasData = canvasData; + this.limited = viewMode === 1 || viewMode === 2; + this.limitCanvas(true, true); + this.initialImageData = assign({}, imageData); + this.initialCanvasData = assign({}, canvasData); + }, + limitCanvas: function limitCanvas(sizeLimited, positionLimited) { + var options = this.options, + containerData = this.containerData, + canvasData = this.canvasData, + cropBoxData = this.cropBoxData; + var viewMode = options.viewMode; + var aspectRatio = canvasData.aspectRatio; + + var cropped = this.cropped && cropBoxData; + + if (sizeLimited) { + var minCanvasWidth = Number(options.minCanvasWidth) || 0; + var minCanvasHeight = Number(options.minCanvasHeight) || 0; + + if (viewMode > 1) { + minCanvasWidth = Math.max(minCanvasWidth, containerData.width); + minCanvasHeight = Math.max(minCanvasHeight, containerData.height); + + if (viewMode === 3) { + if (minCanvasHeight * aspectRatio > minCanvasWidth) { + minCanvasWidth = minCanvasHeight * aspectRatio; + } else { + minCanvasHeight = minCanvasWidth / aspectRatio; + } + } + } else if (viewMode > 0) { + if 
(minCanvasWidth) { + minCanvasWidth = Math.max(minCanvasWidth, cropped ? cropBoxData.width : 0); + } else if (minCanvasHeight) { + minCanvasHeight = Math.max(minCanvasHeight, cropped ? cropBoxData.height : 0); + } else if (cropped) { + minCanvasWidth = cropBoxData.width; + minCanvasHeight = cropBoxData.height; + + if (minCanvasHeight * aspectRatio > minCanvasWidth) { + minCanvasWidth = minCanvasHeight * aspectRatio; + } else { + minCanvasHeight = minCanvasWidth / aspectRatio; + } + } + } + + var _getAdjustedSizes = getAdjustedSizes({ + aspectRatio: aspectRatio, + width: minCanvasWidth, + height: minCanvasHeight + }); + + minCanvasWidth = _getAdjustedSizes.width; + minCanvasHeight = _getAdjustedSizes.height; + + + canvasData.minWidth = minCanvasWidth; + canvasData.minHeight = minCanvasHeight; + canvasData.maxWidth = Infinity; + canvasData.maxHeight = Infinity; + } + + if (positionLimited) { + if (viewMode) { + var newCanvasLeft = containerData.width - canvasData.width; + var newCanvasTop = containerData.height - canvasData.height; + + canvasData.minLeft = Math.min(0, newCanvasLeft); + canvasData.minTop = Math.min(0, newCanvasTop); + canvasData.maxLeft = Math.max(0, newCanvasLeft); + canvasData.maxTop = Math.max(0, newCanvasTop); + + if (cropped && this.limited) { + canvasData.minLeft = Math.min(cropBoxData.left, cropBoxData.left + (cropBoxData.width - canvasData.width)); + canvasData.minTop = Math.min(cropBoxData.top, cropBoxData.top + (cropBoxData.height - canvasData.height)); + canvasData.maxLeft = cropBoxData.left; + canvasData.maxTop = cropBoxData.top; + + if (viewMode === 2) { + if (canvasData.width >= containerData.width) { + canvasData.minLeft = Math.min(0, newCanvasLeft); + canvasData.maxLeft = Math.max(0, newCanvasLeft); + } + + if (canvasData.height >= containerData.height) { + canvasData.minTop = Math.min(0, newCanvasTop); + canvasData.maxTop = Math.max(0, newCanvasTop); + } + } + } + } else { + canvasData.minLeft = -canvasData.width; + canvasData.minTop 
= -canvasData.height; + canvasData.maxLeft = containerData.width; + canvasData.maxTop = containerData.height; + } + } + }, + renderCanvas: function renderCanvas(changed, transformed) { + var canvasData = this.canvasData, + imageData = this.imageData; + + + if (transformed) { + var _getRotatedSizes = getRotatedSizes({ + width: imageData.naturalWidth * Math.abs(imageData.scaleX || 1), + height: imageData.naturalHeight * Math.abs(imageData.scaleY || 1), + degree: imageData.rotate || 0 + }), + naturalWidth = _getRotatedSizes.width, + naturalHeight = _getRotatedSizes.height; + + var width = canvasData.width * (naturalWidth / canvasData.naturalWidth); + var height = canvasData.height * (naturalHeight / canvasData.naturalHeight); + + canvasData.left -= (width - canvasData.width) / 2; + canvasData.top -= (height - canvasData.height) / 2; + canvasData.width = width; + canvasData.height = height; + canvasData.aspectRatio = naturalWidth / naturalHeight; + canvasData.naturalWidth = naturalWidth; + canvasData.naturalHeight = naturalHeight; + this.limitCanvas(true, false); + } + + if (canvasData.width > canvasData.maxWidth || canvasData.width < canvasData.minWidth) { + canvasData.left = canvasData.oldLeft; + } + + if (canvasData.height > canvasData.maxHeight || canvasData.height < canvasData.minHeight) { + canvasData.top = canvasData.oldTop; + } + + canvasData.width = Math.min(Math.max(canvasData.width, canvasData.minWidth), canvasData.maxWidth); + canvasData.height = Math.min(Math.max(canvasData.height, canvasData.minHeight), canvasData.maxHeight); + + this.limitCanvas(false, true); + + canvasData.left = Math.min(Math.max(canvasData.left, canvasData.minLeft), canvasData.maxLeft); + canvasData.top = Math.min(Math.max(canvasData.top, canvasData.minTop), canvasData.maxTop); + canvasData.oldLeft = canvasData.left; + canvasData.oldTop = canvasData.top; + + setStyle(this.canvas, assign({ + width: canvasData.width, + height: canvasData.height + }, getTransforms({ + translateX: 
canvasData.left, + translateY: canvasData.top + }))); + + this.renderImage(changed); + + if (this.cropped && this.limited) { + this.limitCropBox(true, true); + } + }, + renderImage: function renderImage(changed) { + var canvasData = this.canvasData, + imageData = this.imageData; + + var width = imageData.naturalWidth * (canvasData.width / canvasData.naturalWidth); + var height = imageData.naturalHeight * (canvasData.height / canvasData.naturalHeight); + + assign(imageData, { + width: width, + height: height, + left: (canvasData.width - width) / 2, + top: (canvasData.height - height) / 2 + }); + setStyle(this.image, assign({ + width: imageData.width, + height: imageData.height + }, getTransforms(assign({ + translateX: imageData.left, + translateY: imageData.top + }, imageData)))); + + if (changed) { + this.output(); + } + }, + initCropBox: function initCropBox() { + var options = this.options, + canvasData = this.canvasData; + var aspectRatio = options.aspectRatio; + + var autoCropArea = Number(options.autoCropArea) || 0.8; + var cropBoxData = { + width: canvasData.width, + height: canvasData.height + }; + + if (aspectRatio) { + if (canvasData.height * aspectRatio > canvasData.width) { + cropBoxData.height = cropBoxData.width / aspectRatio; + } else { + cropBoxData.width = cropBoxData.height * aspectRatio; + } + } + + this.cropBoxData = cropBoxData; + this.limitCropBox(true, true); + + // Initialize auto crop area + cropBoxData.width = Math.min(Math.max(cropBoxData.width, cropBoxData.minWidth), cropBoxData.maxWidth); + cropBoxData.height = Math.min(Math.max(cropBoxData.height, cropBoxData.minHeight), cropBoxData.maxHeight); + + // The width/height of auto crop area must large than "minWidth/Height" + cropBoxData.width = Math.max(cropBoxData.minWidth, cropBoxData.width * autoCropArea); + cropBoxData.height = Math.max(cropBoxData.minHeight, cropBoxData.height * autoCropArea); + cropBoxData.left = canvasData.left + (canvasData.width - cropBoxData.width) / 2; + 
cropBoxData.top = canvasData.top + (canvasData.height - cropBoxData.height) / 2; + cropBoxData.oldLeft = cropBoxData.left; + cropBoxData.oldTop = cropBoxData.top; + + this.initialCropBoxData = assign({}, cropBoxData); + }, + limitCropBox: function limitCropBox(sizeLimited, positionLimited) { + var options = this.options, + containerData = this.containerData, + canvasData = this.canvasData, + cropBoxData = this.cropBoxData, + limited = this.limited; + var aspectRatio = options.aspectRatio; + + + if (sizeLimited) { + var minCropBoxWidth = Number(options.minCropBoxWidth) || 0; + var minCropBoxHeight = Number(options.minCropBoxHeight) || 0; + var maxCropBoxWidth = Math.min(containerData.width, limited ? canvasData.width : containerData.width); + var maxCropBoxHeight = Math.min(containerData.height, limited ? canvasData.height : containerData.height); + + // The min/maxCropBoxWidth/Height must be less than container's width/height + minCropBoxWidth = Math.min(minCropBoxWidth, containerData.width); + minCropBoxHeight = Math.min(minCropBoxHeight, containerData.height); + + if (aspectRatio) { + if (minCropBoxWidth && minCropBoxHeight) { + if (minCropBoxHeight * aspectRatio > minCropBoxWidth) { + minCropBoxHeight = minCropBoxWidth / aspectRatio; + } else { + minCropBoxWidth = minCropBoxHeight * aspectRatio; + } + } else if (minCropBoxWidth) { + minCropBoxHeight = minCropBoxWidth / aspectRatio; + } else if (minCropBoxHeight) { + minCropBoxWidth = minCropBoxHeight * aspectRatio; + } + + if (maxCropBoxHeight * aspectRatio > maxCropBoxWidth) { + maxCropBoxHeight = maxCropBoxWidth / aspectRatio; + } else { + maxCropBoxWidth = maxCropBoxHeight * aspectRatio; + } + } + + // The minWidth/Height must be less than maxWidth/Height + cropBoxData.minWidth = Math.min(minCropBoxWidth, maxCropBoxWidth); + cropBoxData.minHeight = Math.min(minCropBoxHeight, maxCropBoxHeight); + cropBoxData.maxWidth = maxCropBoxWidth; + cropBoxData.maxHeight = maxCropBoxHeight; + } + + if (positionLimited) { 
+ if (limited) { + cropBoxData.minLeft = Math.max(0, canvasData.left); + cropBoxData.minTop = Math.max(0, canvasData.top); + cropBoxData.maxLeft = Math.min(containerData.width, canvasData.left + canvasData.width) - cropBoxData.width; + cropBoxData.maxTop = Math.min(containerData.height, canvasData.top + canvasData.height) - cropBoxData.height; + } else { + cropBoxData.minLeft = 0; + cropBoxData.minTop = 0; + cropBoxData.maxLeft = containerData.width - cropBoxData.width; + cropBoxData.maxTop = containerData.height - cropBoxData.height; + } + } + }, + renderCropBox: function renderCropBox() { + var options = this.options, + containerData = this.containerData, + cropBoxData = this.cropBoxData; + + + if (cropBoxData.width > cropBoxData.maxWidth || cropBoxData.width < cropBoxData.minWidth) { + cropBoxData.left = cropBoxData.oldLeft; + } + + if (cropBoxData.height > cropBoxData.maxHeight || cropBoxData.height < cropBoxData.minHeight) { + cropBoxData.top = cropBoxData.oldTop; + } + + cropBoxData.width = Math.min(Math.max(cropBoxData.width, cropBoxData.minWidth), cropBoxData.maxWidth); + cropBoxData.height = Math.min(Math.max(cropBoxData.height, cropBoxData.minHeight), cropBoxData.maxHeight); + + this.limitCropBox(false, true); + + cropBoxData.left = Math.min(Math.max(cropBoxData.left, cropBoxData.minLeft), cropBoxData.maxLeft); + cropBoxData.top = Math.min(Math.max(cropBoxData.top, cropBoxData.minTop), cropBoxData.maxTop); + cropBoxData.oldLeft = cropBoxData.left; + cropBoxData.oldTop = cropBoxData.top; + + if (options.movable && options.cropBoxMovable) { + // Turn to move the canvas when the crop box is equal to the container + setData(this.face, DATA_ACTION, cropBoxData.width >= containerData.width && cropBoxData.height >= containerData.height ? 
ACTION_MOVE : ACTION_ALL); + } + + setStyle(this.cropBox, assign({ + width: cropBoxData.width, + height: cropBoxData.height + }, getTransforms({ + translateX: cropBoxData.left, + translateY: cropBoxData.top + }))); + + if (this.cropped && this.limited) { + this.limitCanvas(true, true); + } + + if (!this.disabled) { + this.output(); + } + }, + output: function output() { + this.preview(); + dispatchEvent(this.element, EVENT_CROP, this.getData()); + } + }; + + var preview = { + initPreview: function initPreview() { + var crossOrigin = this.crossOrigin; + var preview = this.options.preview; + + var url = crossOrigin ? this.crossOriginUrl : this.url; + var image = document.createElement('img'); + + if (crossOrigin) { + image.crossOrigin = crossOrigin; + } + + image.src = url; + this.viewBox.appendChild(image); + this.viewBoxImage = image; + + if (!preview) { + return; + } + + var previews = preview; + + if (typeof preview === 'string') { + previews = this.element.ownerDocument.querySelectorAll(preview); + } else if (preview.querySelector) { + previews = [preview]; + } + + this.previews = previews; + + forEach(previews, function (el) { + var img = document.createElement('img'); + + // Save the original size for recover + setData(el, DATA_PREVIEW, { + width: el.offsetWidth, + height: el.offsetHeight, + html: el.innerHTML + }); + + if (crossOrigin) { + img.crossOrigin = crossOrigin; + } + + img.src = url; + + /** + * Override img element styles + * Add `display:block` to avoid margin top issue + * Add `height:auto` to override `height` attribute on IE8 + * (Occur only when margin-top <= -height) + */ + img.style.cssText = 'display:block;' + 'width:100%;' + 'height:auto;' + 'min-width:0!important;' + 'min-height:0!important;' + 'max-width:none!important;' + 'max-height:none!important;' + 'image-orientation:0deg!important;"'; + + el.innerHTML = ''; + el.appendChild(img); + }); + }, + resetPreview: function resetPreview() { + forEach(this.previews, function (element) { + var 
data = getData(element, DATA_PREVIEW); + + setStyle(element, { + width: data.width, + height: data.height + }); + + element.innerHTML = data.html; + removeData(element, DATA_PREVIEW); + }); + }, + preview: function preview() { + var imageData = this.imageData, + canvasData = this.canvasData, + cropBoxData = this.cropBoxData; + var cropBoxWidth = cropBoxData.width, + cropBoxHeight = cropBoxData.height; + var width = imageData.width, + height = imageData.height; + + var left = cropBoxData.left - canvasData.left - imageData.left; + var top = cropBoxData.top - canvasData.top - imageData.top; + + if (!this.cropped || this.disabled) { + return; + } + + setStyle(this.viewBoxImage, assign({ + width: width, + height: height + }, getTransforms(assign({ + translateX: -left, + translateY: -top + }, imageData)))); + + forEach(this.previews, function (element) { + var data = getData(element, DATA_PREVIEW); + var originalWidth = data.width; + var originalHeight = data.height; + var newWidth = originalWidth; + var newHeight = originalHeight; + var ratio = 1; + + if (cropBoxWidth) { + ratio = originalWidth / cropBoxWidth; + newHeight = cropBoxHeight * ratio; + } + + if (cropBoxHeight && newHeight > originalHeight) { + ratio = originalHeight / cropBoxHeight; + newWidth = cropBoxWidth * ratio; + newHeight = originalHeight; + } + + setStyle(element, { + width: newWidth, + height: newHeight + }); + + setStyle(element.getElementsByTagName('img')[0], assign({ + width: width * ratio, + height: height * ratio + }, getTransforms(assign({ + translateX: -left * ratio, + translateY: -top * ratio + }, imageData)))); + }); + } + }; + + var events = { + bind: function bind() { + var element = this.element, + options = this.options, + cropper = this.cropper; + + + if (isFunction(options.cropstart)) { + addListener(element, EVENT_CROP_START, options.cropstart); + } + + if (isFunction(options.cropmove)) { + addListener(element, EVENT_CROP_MOVE, options.cropmove); + } + + if 
(isFunction(options.cropend)) { + addListener(element, EVENT_CROP_END, options.cropend); + } + + if (isFunction(options.crop)) { + addListener(element, EVENT_CROP, options.crop); + } + + if (isFunction(options.zoom)) { + addListener(element, EVENT_ZOOM, options.zoom); + } + + addListener(cropper, EVENT_POINTER_DOWN, this.onCropStart = this.cropStart.bind(this)); + + if (options.zoomable && options.zoomOnWheel) { + addListener(cropper, EVENT_WHEEL, this.onWheel = this.wheel.bind(this)); + } + + if (options.toggleDragModeOnDblclick) { + addListener(cropper, EVENT_DBLCLICK, this.onDblclick = this.dblclick.bind(this)); + } + + addListener(element.ownerDocument, EVENT_POINTER_MOVE, this.onCropMove = this.cropMove.bind(this)); + addListener(element.ownerDocument, EVENT_POINTER_UP, this.onCropEnd = this.cropEnd.bind(this)); + + if (options.responsive) { + addListener(window, EVENT_RESIZE, this.onResize = this.resize.bind(this)); + } + }, + unbind: function unbind() { + var element = this.element, + options = this.options, + cropper = this.cropper; + + + if (isFunction(options.cropstart)) { + removeListener(element, EVENT_CROP_START, options.cropstart); + } + + if (isFunction(options.cropmove)) { + removeListener(element, EVENT_CROP_MOVE, options.cropmove); + } + + if (isFunction(options.cropend)) { + removeListener(element, EVENT_CROP_END, options.cropend); + } + + if (isFunction(options.crop)) { + removeListener(element, EVENT_CROP, options.crop); + } + + if (isFunction(options.zoom)) { + removeListener(element, EVENT_ZOOM, options.zoom); + } + + removeListener(cropper, EVENT_POINTER_DOWN, this.onCropStart); + + if (options.zoomable && options.zoomOnWheel) { + removeListener(cropper, EVENT_WHEEL, this.onWheel); + } + + if (options.toggleDragModeOnDblclick) { + removeListener(cropper, EVENT_DBLCLICK, this.onDblclick); + } + + removeListener(element.ownerDocument, EVENT_POINTER_MOVE, this.onCropMove); + removeListener(element.ownerDocument, EVENT_POINTER_UP, 
this.onCropEnd); + + if (options.responsive) { + removeListener(window, EVENT_RESIZE, this.onResize); + } + } + }; + + var handlers = { + resize: function resize() { + var options = this.options, + container = this.container, + containerData = this.containerData; + + var minContainerWidth = Number(options.minContainerWidth) || 200; + var minContainerHeight = Number(options.minContainerHeight) || 100; + + if (this.disabled || containerData.width <= minContainerWidth || containerData.height <= minContainerHeight) { + return; + } + + var ratio = container.offsetWidth / containerData.width; + + // Resize when width changed or height changed + if (ratio !== 1 || container.offsetHeight !== containerData.height) { + var canvasData = void 0; + var cropBoxData = void 0; + + if (options.restore) { + canvasData = this.getCanvasData(); + cropBoxData = this.getCropBoxData(); + } + + this.render(); + + if (options.restore) { + this.setCanvasData(forEach(canvasData, function (n, i) { + canvasData[i] = n * ratio; + })); + this.setCropBoxData(forEach(cropBoxData, function (n, i) { + cropBoxData[i] = n * ratio; + })); + } + } + }, + dblclick: function dblclick() { + if (this.disabled || this.options.dragMode === DRAG_MODE_NONE) { + return; + } + + this.setDragMode(hasClass(this.dragBox, CLASS_CROP) ? DRAG_MODE_MOVE : DRAG_MODE_CROP); + }, + wheel: function wheel(e) { + var _this = this; + + var ratio = Number(this.options.wheelZoomRatio) || 0.1; + var delta = 1; + + if (this.disabled) { + return; + } + + e.preventDefault(); + + // Limit wheel speed to prevent zoom too fast (#21) + if (this.wheeling) { + return; + } + + this.wheeling = true; + + setTimeout(function () { + _this.wheeling = false; + }, 50); + + if (e.deltaY) { + delta = e.deltaY > 0 ? 1 : -1; + } else if (e.wheelDelta) { + delta = -e.wheelDelta / 120; + } else if (e.detail) { + delta = e.detail > 0 ? 
1 : -1; + } + + this.zoom(-delta * ratio, e); + }, + cropStart: function cropStart(e) { + if (this.disabled) { + return; + } + + var options = this.options, + pointers = this.pointers; + + var action = void 0; + + if (e.changedTouches) { + // Handle touch event + forEach(e.changedTouches, function (touch) { + pointers[touch.identifier] = getPointer(touch); + }); + } else { + // Handle mouse event and pointer event + pointers[e.pointerId || 0] = getPointer(e); + } + + if (Object.keys(pointers).length > 1 && options.zoomable && options.zoomOnTouch) { + action = ACTION_ZOOM; + } else { + action = getData(e.target, DATA_ACTION); + } + + if (!REGEXP_ACTIONS.test(action)) { + return; + } + + if (dispatchEvent(this.element, EVENT_CROP_START, { + originalEvent: e, + action: action + }) === false) { + return; + } + + e.preventDefault(); + + this.action = action; + this.cropping = false; + + if (action === ACTION_CROP) { + this.cropping = true; + addClass(this.dragBox, CLASS_MODAL); + } + }, + cropMove: function cropMove(e) { + var action = this.action; + + + if (this.disabled || !action) { + return; + } + + var pointers = this.pointers; + + + e.preventDefault(); + + if (dispatchEvent(this.element, EVENT_CROP_MOVE, { + originalEvent: e, + action: action + }) === false) { + return; + } + + if (e.changedTouches) { + forEach(e.changedTouches, function (touch) { + assign(pointers[touch.identifier], getPointer(touch, true)); + }); + } else { + assign(pointers[e.pointerId || 0], getPointer(e, true)); + } + + this.change(e); + }, + cropEnd: function cropEnd(e) { + if (this.disabled) { + return; + } + + var action = this.action, + pointers = this.pointers; + + + if (e.changedTouches) { + forEach(e.changedTouches, function (touch) { + delete pointers[touch.identifier]; + }); + } else { + delete pointers[e.pointerId || 0]; + } + + if (!action) { + return; + } + + e.preventDefault(); + + if (!Object.keys(pointers).length) { + this.action = ''; + } + + if (this.cropping) { + 
this.cropping = false; + toggleClass(this.dragBox, CLASS_MODAL, this.cropped && this.options.modal); + } + + dispatchEvent(this.element, EVENT_CROP_END, { + originalEvent: e, + action: action + }); + } + }; + + var change = { + change: function change(e) { + var options = this.options, + canvasData = this.canvasData, + containerData = this.containerData, + cropBoxData = this.cropBoxData, + pointers = this.pointers; + var action = this.action; + var aspectRatio = options.aspectRatio; + var left = cropBoxData.left, + top = cropBoxData.top, + width = cropBoxData.width, + height = cropBoxData.height; + + var right = left + width; + var bottom = top + height; + var minLeft = 0; + var minTop = 0; + var maxWidth = containerData.width; + var maxHeight = containerData.height; + var renderable = true; + var offset = void 0; + + // Locking aspect ratio in "free mode" by holding shift key + if (!aspectRatio && e.shiftKey) { + aspectRatio = width && height ? width / height : 1; + } + + if (this.limited) { + minLeft = cropBoxData.minLeft; + minTop = cropBoxData.minTop; + + maxWidth = minLeft + Math.min(containerData.width, canvasData.width, canvasData.left + canvasData.width); + maxHeight = minTop + Math.min(containerData.height, canvasData.height, canvasData.top + canvasData.height); + } + + var pointer = pointers[Object.keys(pointers)[0]]; + var range = { + x: pointer.endX - pointer.startX, + y: pointer.endY - pointer.startY + }; + var check = function check(side) { + switch (side) { + case ACTION_EAST: + if (right + range.x > maxWidth) { + range.x = maxWidth - right; + } + + break; + + case ACTION_WEST: + if (left + range.x < minLeft) { + range.x = minLeft - left; + } + + break; + + case ACTION_NORTH: + if (top + range.y < minTop) { + range.y = minTop - top; + } + + break; + + case ACTION_SOUTH: + if (bottom + range.y > maxHeight) { + range.y = maxHeight - bottom; + } + + break; + + default: + } + }; + + switch (action) { + // Move crop box + case ACTION_ALL: + left += 
range.x; + top += range.y; + break; + + // Resize crop box + case ACTION_EAST: + if (range.x >= 0 && (right >= maxWidth || aspectRatio && (top <= minTop || bottom >= maxHeight))) { + renderable = false; + break; + } + + check(ACTION_EAST); + width += range.x; + + if (aspectRatio) { + height = width / aspectRatio; + top -= range.x / aspectRatio / 2; + } + + if (width < 0) { + action = ACTION_WEST; + width = 0; + } + + break; + + case ACTION_NORTH: + if (range.y <= 0 && (top <= minTop || aspectRatio && (left <= minLeft || right >= maxWidth))) { + renderable = false; + break; + } + + check(ACTION_NORTH); + height -= range.y; + top += range.y; + + if (aspectRatio) { + width = height * aspectRatio; + left += range.y * aspectRatio / 2; + } + + if (height < 0) { + action = ACTION_SOUTH; + height = 0; + } + + break; + + case ACTION_WEST: + if (range.x <= 0 && (left <= minLeft || aspectRatio && (top <= minTop || bottom >= maxHeight))) { + renderable = false; + break; + } + + check(ACTION_WEST); + width -= range.x; + left += range.x; + + if (aspectRatio) { + height = width / aspectRatio; + top += range.x / aspectRatio / 2; + } + + if (width < 0) { + action = ACTION_EAST; + width = 0; + } + + break; + + case ACTION_SOUTH: + if (range.y >= 0 && (bottom >= maxHeight || aspectRatio && (left <= minLeft || right >= maxWidth))) { + renderable = false; + break; + } + + check(ACTION_SOUTH); + height += range.y; + + if (aspectRatio) { + width = height * aspectRatio; + left -= range.y * aspectRatio / 2; + } + + if (height < 0) { + action = ACTION_NORTH; + height = 0; + } + + break; + + case ACTION_NORTH_EAST: + if (aspectRatio) { + if (range.y <= 0 && (top <= minTop || right >= maxWidth)) { + renderable = false; + break; + } + + check(ACTION_NORTH); + height -= range.y; + top += range.y; + width = height * aspectRatio; + } else { + check(ACTION_NORTH); + check(ACTION_EAST); + + if (range.x >= 0) { + if (right < maxWidth) { + width += range.x; + } else if (range.y <= 0 && top <= minTop) 
{ + renderable = false; + } + } else { + width += range.x; + } + + if (range.y <= 0) { + if (top > minTop) { + height -= range.y; + top += range.y; + } + } else { + height -= range.y; + top += range.y; + } + } + + if (width < 0 && height < 0) { + action = ACTION_SOUTH_WEST; + height = 0; + width = 0; + } else if (width < 0) { + action = ACTION_NORTH_WEST; + width = 0; + } else if (height < 0) { + action = ACTION_SOUTH_EAST; + height = 0; + } + + break; + + case ACTION_NORTH_WEST: + if (aspectRatio) { + if (range.y <= 0 && (top <= minTop || left <= minLeft)) { + renderable = false; + break; + } + + check(ACTION_NORTH); + height -= range.y; + top += range.y; + width = height * aspectRatio; + left += range.y * aspectRatio; + } else { + check(ACTION_NORTH); + check(ACTION_WEST); + + if (range.x <= 0) { + if (left > minLeft) { + width -= range.x; + left += range.x; + } else if (range.y <= 0 && top <= minTop) { + renderable = false; + } + } else { + width -= range.x; + left += range.x; + } + + if (range.y <= 0) { + if (top > minTop) { + height -= range.y; + top += range.y; + } + } else { + height -= range.y; + top += range.y; + } + } + + if (width < 0 && height < 0) { + action = ACTION_SOUTH_EAST; + height = 0; + width = 0; + } else if (width < 0) { + action = ACTION_NORTH_EAST; + width = 0; + } else if (height < 0) { + action = ACTION_SOUTH_WEST; + height = 0; + } + + break; + + case ACTION_SOUTH_WEST: + if (aspectRatio) { + if (range.x <= 0 && (left <= minLeft || bottom >= maxHeight)) { + renderable = false; + break; + } + + check(ACTION_WEST); + width -= range.x; + left += range.x; + height = width / aspectRatio; + } else { + check(ACTION_SOUTH); + check(ACTION_WEST); + + if (range.x <= 0) { + if (left > minLeft) { + width -= range.x; + left += range.x; + } else if (range.y >= 0 && bottom >= maxHeight) { + renderable = false; + } + } else { + width -= range.x; + left += range.x; + } + + if (range.y >= 0) { + if (bottom < maxHeight) { + height += range.y; + } + } else 
{ + height += range.y; + } + } + + if (width < 0 && height < 0) { + action = ACTION_NORTH_EAST; + height = 0; + width = 0; + } else if (width < 0) { + action = ACTION_SOUTH_EAST; + width = 0; + } else if (height < 0) { + action = ACTION_NORTH_WEST; + height = 0; + } + + break; + + case ACTION_SOUTH_EAST: + if (aspectRatio) { + if (range.x >= 0 && (right >= maxWidth || bottom >= maxHeight)) { + renderable = false; + break; + } + + check(ACTION_EAST); + width += range.x; + height = width / aspectRatio; + } else { + check(ACTION_SOUTH); + check(ACTION_EAST); + + if (range.x >= 0) { + if (right < maxWidth) { + width += range.x; + } else if (range.y >= 0 && bottom >= maxHeight) { + renderable = false; + } + } else { + width += range.x; + } + + if (range.y >= 0) { + if (bottom < maxHeight) { + height += range.y; + } + } else { + height += range.y; + } + } + + if (width < 0 && height < 0) { + action = ACTION_NORTH_WEST; + height = 0; + width = 0; + } else if (width < 0) { + action = ACTION_SOUTH_WEST; + width = 0; + } else if (height < 0) { + action = ACTION_NORTH_EAST; + height = 0; + } + + break; + + // Move canvas + case ACTION_MOVE: + this.move(range.x, range.y); + renderable = false; + break; + + // Zoom canvas + case ACTION_ZOOM: + this.zoom(getMaxZoomRatio(pointers), e); + renderable = false; + break; + + // Create crop box + case ACTION_CROP: + if (!range.x || !range.y) { + renderable = false; + break; + } + + offset = getOffset(this.cropper); + left = pointer.startX - offset.left; + top = pointer.startY - offset.top; + width = cropBoxData.minWidth; + height = cropBoxData.minHeight; + + if (range.x > 0) { + action = range.y > 0 ? ACTION_SOUTH_EAST : ACTION_NORTH_EAST; + } else if (range.x < 0) { + left -= width; + action = range.y > 0 ? 
ACTION_SOUTH_WEST : ACTION_NORTH_WEST; + } + + if (range.y < 0) { + top -= height; + } + + // Show the crop box if is hidden + if (!this.cropped) { + removeClass(this.cropBox, CLASS_HIDDEN); + this.cropped = true; + + if (this.limited) { + this.limitCropBox(true, true); + } + } + + break; + + default: + } + + if (renderable) { + cropBoxData.width = width; + cropBoxData.height = height; + cropBoxData.left = left; + cropBoxData.top = top; + this.action = action; + this.renderCropBox(); + } + + // Override + forEach(pointers, function (p) { + p.startX = p.endX; + p.startY = p.endY; + }); + } + }; + + var methods = { + // Show the crop box manually + crop: function crop() { + if (this.ready && !this.cropped && !this.disabled) { + this.cropped = true; + this.limitCropBox(true, true); + + if (this.options.modal) { + addClass(this.dragBox, CLASS_MODAL); + } + + removeClass(this.cropBox, CLASS_HIDDEN); + this.setCropBoxData(this.initialCropBoxData); + } + + return this; + }, + + + // Reset the image and crop box to their initial states + reset: function reset() { + if (this.ready && !this.disabled) { + this.imageData = assign({}, this.initialImageData); + this.canvasData = assign({}, this.initialCanvasData); + this.cropBoxData = assign({}, this.initialCropBoxData); + this.renderCanvas(); + + if (this.cropped) { + this.renderCropBox(); + } + } + + return this; + }, + + + // Clear the crop box + clear: function clear() { + if (this.cropped && !this.disabled) { + assign(this.cropBoxData, { + left: 0, + top: 0, + width: 0, + height: 0 + }); + + this.cropped = false; + this.renderCropBox(); + this.limitCanvas(true, true); + + // Render canvas after crop box rendered + this.renderCanvas(); + removeClass(this.dragBox, CLASS_MODAL); + addClass(this.cropBox, CLASS_HIDDEN); + } + + return this; + }, + + + /** + * Replace the image's src and rebuild the cropper + * @param {string} url - The new URL. 
+ * @param {boolean} [hasSameSize] - Indicate if the new image has the same size as the old one. + * @returns {Cropper} this + */ + replace: function replace(url) { + var hasSameSize = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false; + + if (!this.disabled && url) { + if (this.isImg) { + this.element.src = url; + } + + if (hasSameSize) { + this.url = url; + this.image.src = url; + + if (this.ready) { + this.viewBoxImage.src = url; + + forEach(this.previews, function (element) { + element.getElementsByTagName('img')[0].src = url; + }); + } + } else { + if (this.isImg) { + this.replaced = true; + } + + this.options.data = null; + this.uncreate(); + this.load(url); + } + } + + return this; + }, + + + // Enable (unfreeze) the cropper + enable: function enable() { + if (this.ready && this.disabled) { + this.disabled = false; + removeClass(this.cropper, CLASS_DISABLED); + } + + return this; + }, + + + // Disable (freeze) the cropper + disable: function disable() { + if (this.ready && !this.disabled) { + this.disabled = true; + addClass(this.cropper, CLASS_DISABLED); + } + + return this; + }, + + + /** + * Destroy the cropper and remove the instance from the image + * @returns {Cropper} this + */ + destroy: function destroy() { + var element = this.element; + + + if (!getData(element, NAMESPACE)) { + return this; + } + + if (this.isImg && this.replaced) { + element.src = this.originalUrl; + } + + this.uncreate(); + removeData(element, NAMESPACE); + + return this; + }, + + + /** + * Move the canvas with relative offsets + * @param {number} offsetX - The relative offset distance on the x-axis. + * @param {number} [offsetY=offsetX] - The relative offset distance on the y-axis. + * @returns {Cropper} this + */ + move: function move(offsetX) { + var offsetY = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : offsetX; + var _canvasData = this.canvasData, + left = _canvasData.left, + top = _canvasData.top; + + + return this.moveTo(isUndefined(offsetX) ? offsetX : left + Number(offsetX), isUndefined(offsetY) ? offsetY : top + Number(offsetY)); + }, + + + /** + * Move the canvas to an absolute point + * @param {number} x - The x-axis coordinate. + * @param {number} [y=x] - The y-axis coordinate. + * @returns {Cropper} this + */ + moveTo: function moveTo(x) { + var y = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : x; + var canvasData = this.canvasData; + + var changed = false; + + x = Number(x); + y = Number(y); + + if (this.ready && !this.disabled && this.options.movable) { + if (isNumber(x)) { + canvasData.left = x; + changed = true; + } + + if (isNumber(y)) { + canvasData.top = y; + changed = true; + } + + if (changed) { + this.renderCanvas(true); + } + } + + return this; + }, + + + /** + * Zoom the canvas with a relative ratio + * @param {number} ratio - The target ratio. + * @param {Event} _originalEvent - The original event if any. + * @returns {Cropper} this + */ + zoom: function zoom(ratio, _originalEvent) { + var canvasData = this.canvasData; + + + ratio = Number(ratio); + + if (ratio < 0) { + ratio = 1 / (1 - ratio); + } else { + ratio = 1 + ratio; + } + + return this.zoomTo(canvasData.width * ratio / canvasData.naturalWidth, null, _originalEvent); + }, + + + /** + * Zoom the canvas to an absolute ratio + * @param {number} ratio - The target ratio. + * @param {Object} pivot - The zoom pivot point coordinate. + * @param {Event} _originalEvent - The original event if any. 
+ * @returns {Cropper} this + */ + zoomTo: function zoomTo(ratio, pivot, _originalEvent) { + var options = this.options, + canvasData = this.canvasData; + var width = canvasData.width, + height = canvasData.height, + naturalWidth = canvasData.naturalWidth, + naturalHeight = canvasData.naturalHeight; + + + ratio = Number(ratio); + + if (ratio >= 0 && this.ready && !this.disabled && options.zoomable) { + var newWidth = naturalWidth * ratio; + var newHeight = naturalHeight * ratio; + + if (dispatchEvent(this.element, EVENT_ZOOM, { + originalEvent: _originalEvent, + oldRatio: width / naturalWidth, + ratio: newWidth / naturalWidth + }) === false) { + return this; + } + + if (_originalEvent) { + var pointers = this.pointers; + + var offset = getOffset(this.cropper); + var center = pointers && Object.keys(pointers).length ? getPointersCenter(pointers) : { + pageX: _originalEvent.pageX, + pageY: _originalEvent.pageY + }; + + // Zoom from the triggering point of the event + canvasData.left -= (newWidth - width) * ((center.pageX - offset.left - canvasData.left) / width); + canvasData.top -= (newHeight - height) * ((center.pageY - offset.top - canvasData.top) / height); + } else if (isPlainObject(pivot) && isNumber(pivot.x) && isNumber(pivot.y)) { + canvasData.left -= (newWidth - width) * ((pivot.x - canvasData.left) / width); + canvasData.top -= (newHeight - height) * ((pivot.y - canvasData.top) / height); + } else { + // Zoom from the center of the canvas + canvasData.left -= (newWidth - width) / 2; + canvasData.top -= (newHeight - height) / 2; + } + + canvasData.width = newWidth; + canvasData.height = newHeight; + this.renderCanvas(true); + } + + return this; + }, + + + /** + * Rotate the canvas with a relative degree + * @param {number} degree - The rotate degree. 
+ * @returns {Cropper} this + */ + rotate: function rotate(degree) { + return this.rotateTo((this.imageData.rotate || 0) + Number(degree)); + }, + + + /** + * Rotate the canvas to an absolute degree + * @param {number} degree - The rotate degree. + * @returns {Cropper} this + */ + rotateTo: function rotateTo(degree) { + degree = Number(degree); + + if (isNumber(degree) && this.ready && !this.disabled && this.options.rotatable) { + this.imageData.rotate = degree % 360; + this.renderCanvas(true, true); + } + + return this; + }, + + + /** + * Scale the image on the x-axis. + * @param {number} scaleX - The scale ratio on the x-axis. + * @returns {Cropper} this + */ + scaleX: function scaleX(_scaleX) { + var scaleY = this.imageData.scaleY; + + + return this.scale(_scaleX, isNumber(scaleY) ? scaleY : 1); + }, + + + /** + * Scale the image on the y-axis. + * @param {number} scaleY - The scale ratio on the y-axis. + * @returns {Cropper} this + */ + scaleY: function scaleY(_scaleY) { + var scaleX = this.imageData.scaleX; + + + return this.scale(isNumber(scaleX) ? scaleX : 1, _scaleY); + }, + + + /** + * Scale the image + * @param {number} scaleX - The scale ratio on the x-axis. + * @param {number} [scaleY=scaleX] - The scale ratio on the y-axis. + * @returns {Cropper} this + */ + scale: function scale(scaleX) { + var scaleY = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : scaleX; + var imageData = this.imageData; + + var transformed = false; + + scaleX = Number(scaleX); + scaleY = Number(scaleY); + + if (this.ready && !this.disabled && this.options.scalable) { + if (isNumber(scaleX)) { + imageData.scaleX = scaleX; + transformed = true; + } + + if (isNumber(scaleY)) { + imageData.scaleY = scaleY; + transformed = true; + } + + if (transformed) { + this.renderCanvas(true, true); + } + } + + return this; + }, + + + /** + * Get the cropped area position and size data (base on the original image) + * @param {boolean} [rounded=false] - Indicate if round the data values or not. + * @returns {Object} The result cropped data. + */ + getData: function getData$$1() { + var rounded = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false; + var options = this.options, + imageData = this.imageData, + canvasData = this.canvasData, + cropBoxData = this.cropBoxData; + + var data = void 0; + + if (this.ready && this.cropped) { + data = { + x: cropBoxData.left - canvasData.left, + y: cropBoxData.top - canvasData.top, + width: cropBoxData.width, + height: cropBoxData.height + }; + + var ratio = imageData.width / imageData.naturalWidth; + + forEach(data, function (n, i) { + n /= ratio; + data[i] = rounded ? Math.round(n) : n; + }); + } else { + data = { + x: 0, + y: 0, + width: 0, + height: 0 + }; + } + + if (options.rotatable) { + data.rotate = imageData.rotate || 0; + } + + if (options.scalable) { + data.scaleX = imageData.scaleX || 1; + data.scaleY = imageData.scaleY || 1; + } + + return data; + }, + + + /** + * Set the cropped area position and size with new data + * @param {Object} data - The new data. 
+ * @returns {Cropper} this + */ + setData: function setData$$1(data) { + var options = this.options, + imageData = this.imageData, + canvasData = this.canvasData; + + var cropBoxData = {}; + + if (this.ready && !this.disabled && isPlainObject(data)) { + var transformed = false; + + if (options.rotatable) { + if (isNumber(data.rotate) && data.rotate !== imageData.rotate) { + imageData.rotate = data.rotate; + transformed = true; + } + } + + if (options.scalable) { + if (isNumber(data.scaleX) && data.scaleX !== imageData.scaleX) { + imageData.scaleX = data.scaleX; + transformed = true; + } + + if (isNumber(data.scaleY) && data.scaleY !== imageData.scaleY) { + imageData.scaleY = data.scaleY; + transformed = true; + } + } + + if (transformed) { + this.renderCanvas(true, true); + } + + var ratio = imageData.width / imageData.naturalWidth; + + if (isNumber(data.x)) { + cropBoxData.left = data.x * ratio + canvasData.left; + } + + if (isNumber(data.y)) { + cropBoxData.top = data.y * ratio + canvasData.top; + } + + if (isNumber(data.width)) { + cropBoxData.width = data.width * ratio; + } + + if (isNumber(data.height)) { + cropBoxData.height = data.height * ratio; + } + + this.setCropBoxData(cropBoxData); + } + + return this; + }, + + + /** + * Get the container size data. + * @returns {Object} The result container data. + */ + getContainerData: function getContainerData() { + return this.ready ? assign({}, this.containerData) : {}; + }, + + + /** + * Get the image position and size data. + * @returns {Object} The result image data. + */ + getImageData: function getImageData() { + return this.sized ? assign({}, this.imageData) : {}; + }, + + + /** + * Get the canvas position and size data. + * @returns {Object} The result canvas data. 
+ */ + getCanvasData: function getCanvasData() { + var canvasData = this.canvasData; + + var data = {}; + + if (this.ready) { + forEach(['left', 'top', 'width', 'height', 'naturalWidth', 'naturalHeight'], function (n) { + data[n] = canvasData[n]; + }); + } + + return data; + }, + + + /** + * Set the canvas position and size with new data. + * @param {Object} data - The new canvas data. + * @returns {Cropper} this + */ + setCanvasData: function setCanvasData(data) { + var canvasData = this.canvasData; + var aspectRatio = canvasData.aspectRatio; + + + if (this.ready && !this.disabled && isPlainObject(data)) { + if (isNumber(data.left)) { + canvasData.left = data.left; + } + + if (isNumber(data.top)) { + canvasData.top = data.top; + } + + if (isNumber(data.width)) { + canvasData.width = data.width; + canvasData.height = data.width / aspectRatio; + } else if (isNumber(data.height)) { + canvasData.height = data.height; + canvasData.width = data.height * aspectRatio; + } + + this.renderCanvas(true); + } + + return this; + }, + + + /** + * Get the crop box position and size data. + * @returns {Object} The result crop box data. + */ + getCropBoxData: function getCropBoxData() { + var cropBoxData = this.cropBoxData; + + var data = void 0; + + if (this.ready && this.cropped) { + data = { + left: cropBoxData.left, + top: cropBoxData.top, + width: cropBoxData.width, + height: cropBoxData.height + }; + } + + return data || {}; + }, + + + /** + * Set the crop box position and size with new data. + * @param {Object} data - The new crop box data. 
+ * @returns {Cropper} this + */ + setCropBoxData: function setCropBoxData(data) { + var cropBoxData = this.cropBoxData; + var aspectRatio = this.options.aspectRatio; + + var widthChanged = void 0; + var heightChanged = void 0; + + if (this.ready && this.cropped && !this.disabled && isPlainObject(data)) { + if (isNumber(data.left)) { + cropBoxData.left = data.left; + } + + if (isNumber(data.top)) { + cropBoxData.top = data.top; + } + + if (isNumber(data.width) && data.width !== cropBoxData.width) { + widthChanged = true; + cropBoxData.width = data.width; + } + + if (isNumber(data.height) && data.height !== cropBoxData.height) { + heightChanged = true; + cropBoxData.height = data.height; + } + + if (aspectRatio) { + if (widthChanged) { + cropBoxData.height = cropBoxData.width / aspectRatio; + } else if (heightChanged) { + cropBoxData.width = cropBoxData.height * aspectRatio; + } + } + + this.renderCropBox(); + } + + return this; + }, + + + /** + * Get a canvas drawn the cropped image. + * @param {Object} [options={}] - The config options. + * @returns {HTMLCanvasElement} - The result canvas. + */ + getCroppedCanvas: function getCroppedCanvas() { + var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; + + if (!this.ready || !window.HTMLCanvasElement) { + return null; + } + + var canvasData = this.canvasData; + + var source = getSourceCanvas(this.image, this.imageData, canvasData, options); + + // Returns the source canvas if it is not cropped. 
+ if (!this.cropped) { + return source; + } + + var _getData = this.getData(), + initialX = _getData.x, + initialY = _getData.y, + initialWidth = _getData.width, + initialHeight = _getData.height; + + var ratio = source.width / Math.floor(canvasData.naturalWidth); + + if (ratio !== 1) { + initialX *= ratio; + initialY *= ratio; + initialWidth *= ratio; + initialHeight *= ratio; + } + + var aspectRatio = initialWidth / initialHeight; + var maxSizes = getAdjustedSizes({ + aspectRatio: aspectRatio, + width: options.maxWidth || Infinity, + height: options.maxHeight || Infinity + }); + var minSizes = getAdjustedSizes({ + aspectRatio: aspectRatio, + width: options.minWidth || 0, + height: options.minHeight || 0 + }, 'cover'); + + var _getAdjustedSizes = getAdjustedSizes({ + aspectRatio: aspectRatio, + width: options.width || (ratio !== 1 ? source.width : initialWidth), + height: options.height || (ratio !== 1 ? source.height : initialHeight) + }), + width = _getAdjustedSizes.width, + height = _getAdjustedSizes.height; + + width = Math.min(maxSizes.width, Math.max(minSizes.width, width)); + height = Math.min(maxSizes.height, Math.max(minSizes.height, height)); + + var canvas = document.createElement('canvas'); + var context = canvas.getContext('2d'); + + canvas.width = normalizeDecimalNumber(width); + canvas.height = normalizeDecimalNumber(height); + + context.fillStyle = options.fillColor || 'transparent'; + context.fillRect(0, 0, width, height); + + var _options$imageSmoothi = options.imageSmoothingEnabled, + imageSmoothingEnabled = _options$imageSmoothi === undefined ? 
true : _options$imageSmoothi, + imageSmoothingQuality = options.imageSmoothingQuality; + + + context.imageSmoothingEnabled = imageSmoothingEnabled; + + if (imageSmoothingQuality) { + context.imageSmoothingQuality = imageSmoothingQuality; + } + + // https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D.drawImage + var sourceWidth = source.width; + var sourceHeight = source.height; + + // Source canvas parameters + var srcX = initialX; + var srcY = initialY; + var srcWidth = void 0; + var srcHeight = void 0; + + // Destination canvas parameters + var dstX = void 0; + var dstY = void 0; + var dstWidth = void 0; + var dstHeight = void 0; + + if (srcX <= -initialWidth || srcX > sourceWidth) { + srcX = 0; + srcWidth = 0; + dstX = 0; + dstWidth = 0; + } else if (srcX <= 0) { + dstX = -srcX; + srcX = 0; + srcWidth = Math.min(sourceWidth, initialWidth + srcX); + dstWidth = srcWidth; + } else if (srcX <= sourceWidth) { + dstX = 0; + srcWidth = Math.min(initialWidth, sourceWidth - srcX); + dstWidth = srcWidth; + } + + if (srcWidth <= 0 || srcY <= -initialHeight || srcY > sourceHeight) { + srcY = 0; + srcHeight = 0; + dstY = 0; + dstHeight = 0; + } else if (srcY <= 0) { + dstY = -srcY; + srcY = 0; + srcHeight = Math.min(sourceHeight, initialHeight + srcY); + dstHeight = srcHeight; + } else if (srcY <= sourceHeight) { + dstY = 0; + srcHeight = Math.min(initialHeight, sourceHeight - srcY); + dstHeight = srcHeight; + } + + var params = [srcX, srcY, srcWidth, srcHeight]; + + // Avoid "IndexSizeError" + if (dstWidth > 0 && dstHeight > 0) { + var scale = width / initialWidth; + + params.push(dstX * scale, dstY * scale, dstWidth * scale, dstHeight * scale); + } + + // All the numerical parameters should be integer for `drawImage` + // https://github.com/fengyuanchen/cropper/issues/476 + context.drawImage.apply(context, [source].concat(toConsumableArray(params.map(function (param) { + return Math.floor(normalizeDecimalNumber(param)); + })))); + + return canvas; + 
}, + + + /** + * Change the aspect ratio of the crop box. + * @param {number} aspectRatio - The new aspect ratio. + * @returns {Cropper} this + */ + setAspectRatio: function setAspectRatio(aspectRatio) { + var options = this.options; + + + if (!this.disabled && !isUndefined(aspectRatio)) { + // 0 -> NaN + options.aspectRatio = Math.max(0, aspectRatio) || NaN; + + if (this.ready) { + this.initCropBox(); + + if (this.cropped) { + this.renderCropBox(); + } + } + } + + return this; + }, + + + /** + * Change the drag mode. + * @param {string} mode - The new drag mode. + * @returns {Cropper} this + */ + setDragMode: function setDragMode(mode) { + var options = this.options, + dragBox = this.dragBox, + face = this.face; + + + if (this.ready && !this.disabled) { + var croppable = mode === DRAG_MODE_CROP; + var movable = options.movable && mode === DRAG_MODE_MOVE; + + mode = croppable || movable ? mode : DRAG_MODE_NONE; + + options.dragMode = mode; + setData(dragBox, DATA_ACTION, mode); + toggleClass(dragBox, CLASS_CROP, croppable); + toggleClass(dragBox, CLASS_MOVE, movable); + + if (!options.cropBoxMovable) { + // Sync drag mode to crop box when it is not movable + setData(face, DATA_ACTION, mode); + toggleClass(face, CLASS_CROP, croppable); + toggleClass(face, CLASS_MOVE, movable); + } + } + + return this; + } + }; + + var AnotherCropper = WINDOW.Cropper; + + var Cropper = function () { + /** + * Create a new Cropper. + * @param {Element} element - The target element for cropping. + * @param {Object} [options={}] - The configuration options. + */ + function Cropper(element) { + var options = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + classCallCheck(this, Cropper); + + if (!element || !REGEXP_TAG_NAME.test(element.tagName)) { + throw new Error('The first argument is required and must be an or element.'); + } + + this.element = element; + this.options = assign({}, DEFAULTS, isPlainObject(options) && options); + this.cropped = false; + this.disabled = false; + this.pointers = {}; + this.ready = false; + this.reloading = false; + this.replaced = false; + this.sized = false; + this.sizing = false; + this.init(); + } + + createClass(Cropper, [{ + key: 'init', + value: function init() { + var element = this.element; + + var tagName = element.tagName.toLowerCase(); + var url = void 0; + + if (getData(element, NAMESPACE)) { + return; + } + + setData(element, NAMESPACE, this); + + if (tagName === 'img') { + this.isImg = true; + + // e.g.: "img/picture.jpg" + url = element.getAttribute('src') || ''; + this.originalUrl = url; + + // Stop when it's a blank image + if (!url) { + return; + } + + // e.g.: "http://example.com/img/picture.jpg" + url = element.src; + } else if (tagName === 'canvas' && window.HTMLCanvasElement) { + url = element.toDataURL(); + } + + this.load(url); + } + }, { + key: 'load', + value: function load(url) { + var _this = this; + + if (!url) { + return; + } + + this.url = url; + this.imageData = {}; + + var element = this.element, + options = this.options; + + + if (!options.checkOrientation || !window.ArrayBuffer) { + this.clone(); + return; + } + + // XMLHttpRequest disallows to open a Data URL in some browsers like IE11 and Safari + if (REGEXP_DATA_URL.test(url)) { + if (REGEXP_DATA_URL_JPEG.test(url)) { + this.read(dataURLToArrayBuffer(url)); + } else { + this.clone(); + } + + return; + } + + var xhr = new XMLHttpRequest(); + + this.reloading = true; + this.xhr = xhr; + + var done = function done() { + _this.reloading = false; + _this.xhr = null; + }; + + xhr.ontimeout = done; + xhr.onabort = done; + xhr.onerror = function () { + done(); + _this.clone(); + }; + 
+ xhr.onload = function () { + done(); + _this.read(xhr.response); + }; + + // Bust cache when there is a "crossOrigin" property + if (options.checkCrossOrigin && isCrossOriginURL(url) && element.crossOrigin) { + url = addTimestamp(url); + } + + xhr.open('get', url); + xhr.responseType = 'arraybuffer'; + xhr.withCredentials = element.crossOrigin === 'use-credentials'; + xhr.send(); + } + }, { + key: 'read', + value: function read(arrayBuffer) { + var options = this.options, + imageData = this.imageData; + + var orientation = getOrientation(arrayBuffer); + var rotate = 0; + var scaleX = 1; + var scaleY = 1; + + if (orientation > 1) { + this.url = arrayBufferToDataURL(arrayBuffer, 'image/jpeg'); + + var _parseOrientation = parseOrientation(orientation); + + rotate = _parseOrientation.rotate; + scaleX = _parseOrientation.scaleX; + scaleY = _parseOrientation.scaleY; + } + + if (options.rotatable) { + imageData.rotate = rotate; + } + + if (options.scalable) { + imageData.scaleX = scaleX; + imageData.scaleY = scaleY; + } + + this.clone(); + } + }, { + key: 'clone', + value: function clone() { + var element = this.element, + url = this.url; + + var crossOrigin = void 0; + var crossOriginUrl = void 0; + + if (this.options.checkCrossOrigin && isCrossOriginURL(url)) { + crossOrigin = element.crossOrigin; + + + if (crossOrigin) { + crossOriginUrl = url; + } else { + crossOrigin = 'anonymous'; + + // Bust cache when there is not a "crossOrigin" property + crossOriginUrl = addTimestamp(url); + } + } + + this.crossOrigin = crossOrigin; + this.crossOriginUrl = crossOriginUrl; + + var image = document.createElement('img'); + + if (crossOrigin) { + image.crossOrigin = crossOrigin; + } + + image.src = crossOriginUrl || url; + + var start = this.start.bind(this); + var stop = this.stop.bind(this); + + this.image = image; + this.onStart = start; + this.onStop = stop; + + if (this.isImg) { + if (element.complete) { + // start asynchronously to keep `this.cropper` is accessible in 
`ready` event handler. + this.timeout = setTimeout(start, 0); + } else { + addListener(element, EVENT_LOAD, start, { + once: true + }); + } + } else { + image.onload = start; + image.onerror = stop; + addClass(image, CLASS_HIDE); + element.parentNode.insertBefore(image, element.nextSibling); + } + } + }, { + key: 'start', + value: function start(event) { + var _this2 = this; + + var image = this.isImg ? this.element : this.image; + + if (event) { + image.onload = null; + image.onerror = null; + } + + this.sizing = true; + + var IS_SAFARI = WINDOW.navigator && /(Macintosh|iPhone|iPod|iPad).*AppleWebKit/i.test(WINDOW.navigator.userAgent); + var done = function done(naturalWidth, naturalHeight) { + assign(_this2.imageData, { + naturalWidth: naturalWidth, + naturalHeight: naturalHeight, + aspectRatio: naturalWidth / naturalHeight + }); + _this2.sizing = false; + _this2.sized = true; + _this2.build(); + }; + + // Modern browsers (except Safari) + if (image.naturalWidth && !IS_SAFARI) { + done(image.naturalWidth, image.naturalHeight); + return; + } + + var sizingImage = document.createElement('img'); + var body = document.body || document.documentElement; + + this.sizingImage = sizingImage; + + sizingImage.onload = function () { + done(sizingImage.width, sizingImage.height); + + if (!IS_SAFARI) { + body.removeChild(sizingImage); + } + }; + + sizingImage.src = image.src; + + // iOS Safari will convert the image automatically + // with its orientation once append it into DOM (#279) + if (!IS_SAFARI) { + sizingImage.style.cssText = 'left:0;' + 'max-height:none!important;' + 'max-width:none!important;' + 'min-height:0!important;' + 'min-width:0!important;' + 'opacity:0;' + 'position:absolute;' + 'top:0;' + 'z-index:-1;'; + body.appendChild(sizingImage); + } + } + }, { + key: 'stop', + value: function stop() { + var image = this.image; + + + image.onload = null; + image.onerror = null; + image.parentNode.removeChild(image); + this.image = null; + } + }, { + key: 'build', + 
value: function build() { + if (!this.sized || this.ready) { + return; + } + + var element = this.element, + options = this.options, + image = this.image; + + // Create cropper elements + + var container = element.parentNode; + var template = document.createElement('div'); + + template.innerHTML = TEMPLATE; + + var cropper = template.querySelector('.' + NAMESPACE + '-container'); + var canvas = cropper.querySelector('.' + NAMESPACE + '-canvas'); + var dragBox = cropper.querySelector('.' + NAMESPACE + '-drag-box'); + var cropBox = cropper.querySelector('.' + NAMESPACE + '-crop-box'); + var face = cropBox.querySelector('.' + NAMESPACE + '-face'); + + this.container = container; + this.cropper = cropper; + this.canvas = canvas; + this.dragBox = dragBox; + this.cropBox = cropBox; + this.viewBox = cropper.querySelector('.' + NAMESPACE + '-view-box'); + this.face = face; + + canvas.appendChild(image); + + // Hide the original image + addClass(element, CLASS_HIDDEN); + + // Inserts the cropper after to the current image + container.insertBefore(cropper, element.nextSibling); + + // Show the image if is hidden + if (!this.isImg) { + removeClass(image, CLASS_HIDE); + } + + this.initPreview(); + this.bind(); + + options.aspectRatio = Math.max(0, options.aspectRatio) || NaN; + options.viewMode = Math.max(0, Math.min(3, Math.round(options.viewMode))) || 0; + + addClass(cropBox, CLASS_HIDDEN); + + if (!options.guides) { + addClass(cropBox.getElementsByClassName(NAMESPACE + '-dashed'), CLASS_HIDDEN); + } + + if (!options.center) { + addClass(cropBox.getElementsByClassName(NAMESPACE + '-center'), CLASS_HIDDEN); + } + + if (options.background) { + addClass(cropper, NAMESPACE + '-bg'); + } + + if (!options.highlight) { + addClass(face, CLASS_INVISIBLE); + } + + if (options.cropBoxMovable) { + addClass(face, CLASS_MOVE); + setData(face, DATA_ACTION, ACTION_ALL); + } + + if (!options.cropBoxResizable) { + addClass(cropBox.getElementsByClassName(NAMESPACE + '-line'), CLASS_HIDDEN); + 
addClass(cropBox.getElementsByClassName(NAMESPACE + '-point'), CLASS_HIDDEN); + } + + this.render(); + this.ready = true; + this.setDragMode(options.dragMode); + + if (options.autoCrop) { + this.crop(); + } + + this.setData(options.data); + + if (isFunction(options.ready)) { + addListener(element, EVENT_READY, options.ready, { + once: true + }); + } + + dispatchEvent(element, EVENT_READY); + } + }, { + key: 'unbuild', + value: function unbuild() { + if (!this.ready) { + return; + } + + this.ready = false; + this.unbind(); + this.resetPreview(); + this.cropper.parentNode.removeChild(this.cropper); + removeClass(this.element, CLASS_HIDDEN); + } + }, { + key: 'uncreate', + value: function uncreate() { + var element = this.element; + + + if (this.ready) { + this.unbuild(); + this.ready = false; + this.cropped = false; + } else if (this.sizing) { + this.sizingImage.onload = null; + this.sizing = false; + this.sized = false; + } else if (this.reloading) { + this.xhr.abort(); + } else if (this.isImg) { + if (element.complete) { + clearTimeout(this.timeout); + } else { + removeListener(element, EVENT_LOAD, this.onStart); + } + } else if (this.image) { + this.stop(); + } + } + + /** + * Get the no conflict cropper class. + * @returns {Cropper} The cropper class. + */ + + }], [{ + key: 'noConflict', + value: function noConflict() { + window.Cropper = AnotherCropper; + return Cropper; + } + + /** + * Change the default options. + * @param {Object} options - The new default options. + */ + + }, { + key: 'setDefaults', + value: function setDefaults(options) { + assign(DEFAULTS, isPlainObject(options) && options); + } + }]); + return Cropper; + }(); + + assign(Cropper.prototype, render, preview, events, handlers, change, methods); + + if ($.fn) { + var AnotherCropper$1 = $.fn.cropper; + var NAMESPACE$1 = 'cropper'; + + $.fn.cropper = function jQueryCropper(option) { + for (var _len = arguments.length, args = Array(_len > 1 ? 
_len - 1 : 0), _key = 1; _key < _len; _key++) { + args[_key - 1] = arguments[_key]; + } + + var result = void 0; + + this.each(function (i, element) { + var $element = $(element); + var isDestroy = option === 'destroy'; + var cropper = $element.data(NAMESPACE$1); + + if (!cropper) { + if (isDestroy) { + return; + } + + var options = $.extend({}, $element.data(), $.isPlainObject(option) && option); + + cropper = new Cropper(element, options); + $element.data(NAMESPACE$1, cropper); + } + + if (typeof option === 'string') { + var fn = cropper[option]; + + if ($.isFunction(fn)) { + result = fn.apply(cropper, args); + + if (result === cropper) { + result = undefined; + } + + if (isDestroy) { + $element.removeData(NAMESPACE$1); + } + } + } + }); + + return result !== undefined ? result : this; + }; + + $.fn.cropper.Constructor = Cropper; + $.fn.cropper.setDefaults = Cropper.setDefaults; + $.fn.cropper.noConflict = function noConflict() { + $.fn.cropper = AnotherCropper$1; + return this; + }; + } + +}))); diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/cropper/cropper.min.css b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/cropper/cropper.min.css new file mode 100644 index 0000000000000000000000000000000000000000..96d5de71bd4a12ff1702fe7d50affc3d0d203b06 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/cropper/cropper.min.css @@ -0,0 +1,9 @@ +/*! 
+ * Cropper v4.0.0 + * https://github.com/fengyuanchen/cropper + * + * Copyright (c) 2014-2018 Chen Fengyuan + * Released under the MIT license + * + * Date: 2018-04-01T06:26:32.417Z + */.cropper-container{direction:ltr;font-size:0;line-height:0;position:relative;-ms-touch-action:none;touch-action:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.cropper-container img{display:block;height:100%;image-orientation:0deg;max-height:none!important;max-width:none!important;min-height:0!important;min-width:0!important;width:100%}.cropper-canvas,.cropper-crop-box,.cropper-drag-box,.cropper-modal,.cropper-wrap-box{bottom:0;left:0;position:absolute;right:0;top:0}.cropper-canvas,.cropper-wrap-box{overflow:hidden}.cropper-drag-box{background-color:#fff;opacity:0}.cropper-modal{background-color:#000;opacity:.5}.cropper-view-box{display:block;height:100%;outline-color:rgba(51,153,255,.75);outline:1px solid #39f;overflow:hidden;width:100%}.cropper-dashed{border:0 dashed #eee;display:block;opacity:.5;position:absolute}.cropper-dashed.dashed-h{border-bottom-width:1px;border-top-width:1px;height:33.33333%;left:0;top:33.33333%;width:100%}.cropper-dashed.dashed-v{border-left-width:1px;border-right-width:1px;height:100%;left:33.33333%;top:0;width:33.33333%}.cropper-center{display:block;height:0;left:50%;opacity:.75;position:absolute;top:50%;width:0}.cropper-center:after,.cropper-center:before{background-color:#eee;content:" 
";display:block;position:absolute}.cropper-center:before{height:1px;left:-3px;top:0;width:7px}.cropper-center:after{height:7px;left:0;top:-3px;width:1px}.cropper-face,.cropper-line,.cropper-point{display:block;height:100%;opacity:.1;position:absolute;width:100%}.cropper-face{background-color:#fff;left:0;top:0}.cropper-line{background-color:#39f}.cropper-line.line-e{cursor:ew-resize;right:-3px;top:0;width:5px}.cropper-line.line-n{cursor:ns-resize;height:5px;left:0;top:-3px}.cropper-line.line-w{cursor:ew-resize;left:-3px;top:0;width:5px}.cropper-line.line-s{bottom:-3px;cursor:ns-resize;height:5px;left:0}.cropper-point{background-color:#39f;height:5px;opacity:.75;width:5px}.cropper-point.point-e{cursor:ew-resize;margin-top:-3px;right:-3px;top:50%}.cropper-point.point-n{cursor:ns-resize;left:50%;margin-left:-3px;top:-3px}.cropper-point.point-w{cursor:ew-resize;left:-3px;margin-top:-3px;top:50%}.cropper-point.point-s{bottom:-3px;cursor:s-resize;left:50%;margin-left:-3px}.cropper-point.point-ne{cursor:nesw-resize;right:-3px;top:-3px}.cropper-point.point-nw{cursor:nwse-resize;left:-3px;top:-3px}.cropper-point.point-sw{bottom:-3px;cursor:nesw-resize;left:-3px}.cropper-point.point-se{bottom:-3px;cursor:nwse-resize;height:20px;opacity:1;right:-3px;width:20px}@media (min-width:768px){.cropper-point.point-se{height:15px;width:15px}}@media (min-width:992px){.cropper-point.point-se{height:10px;width:10px}}@media (min-width:1200px){.cropper-point.point-se{height:5px;opacity:.75;width:5px}}.cropper-point.point-se:before{background-color:#39f;bottom:-50%;content:" 
";display:block;height:200%;opacity:0;position:absolute;right:-50%;width:200%}.cropper-invisible{opacity:0}.cropper-bg{background-image:url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQAQMAAAAlPW0iAAAAA3NCSVQICAjb4U/gAAAABlBMVEXMzMz////TjRV2AAAACXBIWXMAAArrAAAK6wGCiw1aAAAAHHRFWHRTb2Z0d2FyZQBBZG9iZSBGaXJld29ya3MgQ1M26LyyjAAAABFJREFUCJlj+M/AgBVhF/0PAH6/D/HkDxOGAAAAAElFTkSuQmCC")}.cropper-hide{display:block;height:0;position:absolute;width:0}.cropper-hidden{display:none!important}.cropper-move{cursor:move}.cropper-crop{cursor:crosshair}.cropper-disabled .cropper-drag-box,.cropper-disabled .cropper-face,.cropper-disabled .cropper-line,.cropper-disabled .cropper-point{cursor:not-allowed} \ No newline at end of file diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/cropper/cropper.min.js b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/cropper/cropper.min.js new file mode 100644 index 0000000000000000000000000000000000000000..4b451141a6346a28270bf5c9c17cba01d58cbcaf --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/cropper/cropper.min.js @@ -0,0 +1,10 @@ +/*! 
+ * Cropper v4.0.0 + * https://github.com/fengyuanchen/cropper + * + * Copyright (c) 2014-2018 Chen Fengyuan + * Released under the MIT license + * + * Date: 2018-04-01T06:27:27.267Z + */ +!function(t,i){"object"==typeof exports&&"undefined"!=typeof module?i(require("jquery")):"function"==typeof define&&define.amd?define(["jquery"],i):i(t.jQuery)}(this,function(d){"use strict";d=d&&d.hasOwnProperty("default")?d.default:d;var n="undefined"!=typeof window,r=n?window:{},l="cropper",k="all",T="crop",W="move",E="zoom",H="e",N="w",L="s",O="n",z="ne",Y="nw",X="se",R="sw",h=l+"-crop",t=l+"-disabled",S=l+"-hidden",p=l+"-hide",o=l+"-modal",m=l+"-move",u="action",g="preview",s="crop",c="move",f="none",a="crop",v="cropend",w="cropmove",b="cropstart",x="dblclick",y="load",M=r.PointerEvent?"pointerdown":"touchstart mousedown",C=r.PointerEvent?"pointermove":"touchmove mousemove",D=r.PointerEvent?"pointerup pointercancel":"touchend touchcancel mouseup",B="ready",A="resize",I="wheel mousewheel DOMMouseScroll",j="zoom",U=/^(?:e|w|s|n|se|sw|ne|nw|all|crop|move|zoom)$/,P=/^data:/,q=/^data:image\/jpeg;base64,/,$=/^(?:img|canvas)$/i,Q={viewMode:0,dragMode:s,aspectRatio:NaN,data:null,preview:"",responsive:!0,restore:!0,checkCrossOrigin:!0,checkOrientation:!0,modal:!0,guides:!0,center:!0,highlight:!0,background:!0,autoCrop:!0,autoCropArea:.8,movable:!0,rotatable:!0,scalable:!0,zoomable:!0,zoomOnTouch:!0,zoomOnWheel:!0,wheelZoomRatio:.1,cropBoxMovable:!0,cropBoxResizable:!0,toggleDragModeOnDblclick:!0,minCanvasWidth:0,minCanvasHeight:0,minCropBoxWidth:0,minCropBoxHeight:0,minContainerWidth:200,minContainerHeight:100,ready:null,cropstart:null,cropmove:null,cropend:null,crop:null,zoom:null},i="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t},F=function(t,i){if(!(t instanceof i))throw new TypeError("Cannot call a class as a 
function")},Z=function(){function a(t,i){for(var e=0;et.width?3===e?r=t.height*h:s=t.width/h:3===e?s=t.width/h:r=t.height*h;var c={aspectRatio:h,naturalWidth:n,naturalHeight:o,width:r,height:s};c.left=(t.width-r)/2,c.top=(t.height-s)/2,c.oldLeft=c.left,c.oldTop=c.top,this.canvasData=c,this.limited=1===e||2===e,this.limitCanvas(!0,!0),this.initialImageData=et({},i),this.initialCanvasData=et({},c)},limitCanvas:function(t,i){var e=this.options,a=this.containerData,n=this.canvasData,o=this.cropBoxData,h=e.viewMode,r=n.aspectRatio,s=this.cropped&&o;if(t){var c=Number(e.minCanvasWidth)||0,d=Number(e.minCanvasHeight)||0;1=a.width&&(n.minLeft=Math.min(0,p),n.maxLeft=Math.max(0,p)),n.height>=a.height&&(n.minTop=Math.min(0,m),n.maxTop=Math.max(0,m))))}else n.minLeft=-n.width,n.minTop=-n.height,n.maxLeft=a.width,n.maxTop=a.height},renderCanvas:function(t,i){var e=this.canvasData,a=this.imageData;if(i){var n=function(t){var i=t.width,e=t.height,a=t.degree;if(90==(a=Math.abs(a)%180))return{width:e,height:i};var n=a%90*Math.PI/180,o=Math.sin(n),h=Math.cos(n),r=i*h+e*o,s=i*o+e*h;return 90e.maxWidth||e.widthe.maxHeight||e.heighti.width?n.height=n.width/e:n.width=n.height*e),this.cropBoxData=n,this.limitCropBox(!0,!0),n.width=Math.min(Math.max(n.width,n.minWidth),n.maxWidth),n.height=Math.min(Math.max(n.height,n.minHeight),n.maxHeight),n.width=Math.max(n.minWidth,n.width*a),n.height=Math.max(n.minHeight,n.height*a),n.left=i.left+(i.width-n.width)/2,n.top=i.top+(i.height-n.height)/2,n.oldLeft=n.left,n.oldTop=n.top,this.initialCropBoxData=et({},n)},limitCropBox:function(t,i){var e=this.options,a=this.containerData,n=this.canvasData,o=this.cropBoxData,h=this.limited,r=e.aspectRatio;if(t){var 
s=Number(e.minCropBoxWidth)||0,c=Number(e.minCropBoxHeight)||0,d=Math.min(a.width,h?n.width:a.width),l=Math.min(a.height,h?n.height:a.height);s=Math.min(s,a.width),c=Math.min(c,a.height),r&&(s&&c?se.maxWidth||e.widthe.maxHeight||e.height=i.width&&e.height>=i.height?W:k),ot(this.cropBox,et({width:e.width,height:e.height},kt({translateX:e.left,translateY:e.top}))),this.cropped&&this.limited&&this.limitCanvas(!0,!0),this.disabled||this.output()},output:function(){this.preview(),wt(this.element,a,this.getData())}},zt={initPreview:function(){var e=this.crossOrigin,t=this.options.preview,a=e?this.crossOriginUrl:this.url,i=document.createElement("img");if(e&&(i.crossOrigin=e),i.src=a,this.viewBox.appendChild(i),this.viewBoxImage=i,t){var n=t;"string"==typeof t?n=this.element.ownerDocument.querySelectorAll(t):t.querySelector&&(n=[t]),it(this.previews=n,function(t){var i=document.createElement("img");pt(t,g,{width:t.offsetWidth,height:t.offsetHeight,html:t.innerHTML}),e&&(i.crossOrigin=e),i.src=a,i.style.cssText='display:block;width:100%;height:auto;min-width:0!important;min-height:0!important;max-width:none!important;max-height:none!important;image-orientation:0deg!important;"',t.innerHTML="",t.appendChild(i)})}},resetPreview:function(){it(this.previews,function(t){var i=lt(t,g);ot(t,{width:i.width,height:i.height}),t.innerHTML=i.html,mt(t,g)})},preview:function(){var r=this.imageData,t=this.canvasData,i=this.cropBoxData,s=i.width,c=i.height,d=r.width,l=r.height,p=i.left-t.left-r.left,m=i.top-t.top-r.top;this.cropped&&!this.disabled&&(ot(this.viewBoxImage,et({width:d,height:l},kt(et({translateX:-p,translateY:-m},r)))),it(this.previews,function(t){var i=lt(t,g),e=i.width,a=i.height,n=e,o=a,h=1;s&&(o=c*(h=e/s)),c&&af&&(D.x=f-p);break;case N:s+D.xv&&(D.y=v-m)}};switch(h){case k:s+=D.x,c+=D.y;break;case H:if(0<=D.x&&(f<=p||r&&(c<=g||v<=m))){w=!1;break}B(H),d+=D.x,r&&(l=d/r,c-=D.x/r/2),d<0&&(h=N,d=0);break;case 
O:if(D.y<=0&&(c<=g||r&&(s<=u||f<=p))){w=!1;break}B(O),l-=D.y,c+=D.y,r&&(d=l*r,s+=D.y*r/2),l<0&&(h=L,l=0);break;case N:if(D.x<=0&&(s<=u||r&&(c<=g||v<=m))){w=!1;break}B(N),d-=D.x,s+=D.x,r&&(l=d/r,c+=D.x/r/2),d<0&&(h=H,d=0);break;case L:if(0<=D.y&&(v<=m||r&&(s<=u||f<=p))){w=!1;break}B(L),l+=D.y,r&&(d=l*r,s-=D.y*r/2),l<0&&(h=O,l=0);break;case z:if(r){if(D.y<=0&&(c<=g||f<=p)){w=!1;break}B(O),l-=D.y,c+=D.y,d=l*r}else B(O),B(H),0<=D.x?p or element.");this.element=t,this.options=et({},Q,_(i)&&i),this.cropped=!1,this.disabled=!1,this.pointers={},this.ready=!1,this.reloading=!1,this.replaced=!1,this.sized=!1,this.sizing=!1,this.init()}return Z(e,[{key:"init",value:function(){var t=this.element,i=t.tagName.toLowerCase(),e=void 0;if(!lt(t,l)){if(pt(t,l,this),"img"===i){if(this.isImg=!0,e=t.getAttribute("src")||"",!(this.originalUrl=e))return;e=t.src}else"canvas"===i&&window.HTMLCanvasElement&&(e=t.toDataURL());this.load(e)}}},{key:"load",value:function(t){var i=this;if(t){this.url=t,this.imageData={};var e=this.element,a=this.options;if(a.checkOrientation&&window.ArrayBuffer)if(P.test(t))q.test(t)?this.read((n=t.replace(Nt,""),o=atob(n),h=new ArrayBuffer(o.length),it(r=new Uint8Array(h),function(t,i){r[i]=o.charCodeAt(i)}),h)):this.clone();else{var n,o,h,r,s=new XMLHttpRequest;this.reloading=!0,this.xhr=s;var c=function(){i.reloading=!1,i.xhr=null};s.ontimeout=c,s.onabort=c,s.onerror=function(){c(),i.clone()},s.onload=function(){c(),i.read(s.response)},a.checkCrossOrigin&&Dt(t)&&e.crossOrigin&&(t=Bt(t)),s.open("get",t),s.responseType="arraybuffer",s.withCredentials="use-credentials"===e.crossOrigin,s.send()}else this.clone()}}},{key:"read",value:function(t){var i,e,a,n=this.options,o=this.imageData,h=Lt(t),r=0,s=1,c=1;if(1
';var o=n.querySelector("."+l+"-container"),h=o.querySelector("."+l+"-canvas"),r=o.querySelector("."+l+"-drag-box"),s=o.querySelector("."+l+"-crop-box"),c=s.querySelector("."+l+"-face");this.container=a,this.cropper=o,this.canvas=h,this.dragBox=r,this.cropBox=s,this.viewBox=o.querySelector("."+l+"-view-box"),this.face=c,h.appendChild(e),ht(t,S),a.insertBefore(o,t.nextSibling),this.isImg||rt(e,p),this.initPreview(),this.bind(),i.aspectRatio=Math.max(0,i.aspectRatio)||NaN,i.viewMode=Math.max(0,Math.min(3,Math.round(i.viewMode)))||0,ht(s,S),i.guides||ht(s.getElementsByClassName(l+"-dashed"),S),i.center||ht(s.getElementsByClassName(l+"-center"),S),i.background&&ht(o,l+"-bg"),i.highlight||ht(c,"cropper-invisible"),i.cropBoxMovable&&(ht(c,m),pt(c,u,k)),i.cropBoxResizable||(ht(s.getElementsByClassName(l+"-line"),S),ht(s.getElementsByClassName(l+"-point"),S)),this.render(),this.ready=!0,this.setDragMode(i.dragMode),i.autoCrop&&this.crop(),this.setData(i.data),tt(i.ready)&&vt(t,B,i.ready,{once:!0}),wt(t,B)}}},{key:"unbuild",value:function(){this.ready&&(this.ready=!1,this.unbind(),this.resetPreview(),this.cropper.parentNode.removeChild(this.cropper),rt(this.element,S))}},{key:"uncreate",value:function(){var t=this.element;this.ready?(this.unbuild(),this.ready=!1,this.cropped=!1):this.sizing?(this.sizingImage.onload=null,this.sizing=!1,this.sized=!1):this.reloading?this.xhr.abort():this.isImg?t.complete?clearTimeout(this.timeout):ft(t,y,this.onStart):this.image&&this.stop()}}],[{key:"noConflict",value:function(){return window.Cropper=At,e}},{key:"setDefaults",value:function(t){et(Q,_(t)&&t)}}]),e}();if(et(It.prototype,Ot,zt,Yt,Xt,Rt,St),d.fn){var jt=d.fn.cropper,Ut="cropper";d.fn.cropper=function(r){for(var t=arguments.length,s=Array(1 div { + display: inline-block;; + margin-right: 10px; +} +.img-preview { + overflow: hidden; +} +.img-preview-box .img-preview-lg { + width: 150px; + height: 150px; +} +.img-preview-box .img-preview-md { + width: 100px; + height: 100px; +} 
+.img-preview-box .img-preview-sm { + width: 50px; + height: 50px; + border-radius: 50%; +} \ No newline at end of file diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/css/library.css b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/css/library.css new file mode 100644 index 0000000000000000000000000000000000000000..5cccbc3e2e2424adc27a3c02c94fceab90fc1018 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/css/library.css @@ -0,0 +1,167 @@ +body{ + background-color: #edf7f9; !important; + font-family: Arial, 黑体, Tahoma; +} +.nav_library{ + width: 100%; + height: 80px; + background-color: #155070; + padding-left: 50px; + padding-right: 50px; + display: -webkit-box; + display: -webkit-flex; + display: flex; + -webkit-box-pack: justify; + -webkit-justify-content: space-between; + justify-content: space-between; + -webkit-box-align: center; + -webkit-align-items: center; + align-items: center; + box-sizing: border-box; +} +.nav_left{ + display: -webkit-box; + display: -webkit-flex; + display: flex; + -webkit-box-pack: justify; + -webkit-justify-content: space-between; + justify-content: space-between; + -webkit-box-align: center; + -webkit-align-items: center; + align-items: center; +} +.nav_left img{ + width: 60px; + height: 60px; + margin-right: 20px; +} +.nav_left p span{ + font-size: 24px; + font-weight: bold; + color: #ffffff; +} +.container_top{ + height: 45px; + line-height: 45px; + font-weight: bold; + border-bottom: 1px solid #ccc; +} +.top_insert{ + width: 100%; + background-color: #ffffff; + padding: 45px 50px 20px; + margin-bottom: 10px; +} +.del_con span{ + color: #cccccc; + margin-left: 20px; +} +.bot_pic_con{ + background-color: #ffffff; + padding: 
50px; +} +.news_box{ + margin-bottom: 20px; + overflow: hidden; +} +.news_box li{ + margin-bottom: 20px; +} +.row_dif{ + margin-left: -10px; + margin-right: -10px; +} +.small_card{ + padding-left: 10px; + padding-right: 10px; + position: relative; +} +.news_box li a img { + margin-bottom: 20px; + width: 100%; +} +.p_title{ + margin-bottom:10px ; + text-align: center; +} +.p_title span { + color: #283138; + overflow: hidden; + text-overflow: ellipsis; + display: -webkit-box; + -webkit-line-clamp: 1; + -webkit-box-orient: vertical; + font-weight: 600; + font-size: 15px; +} +.p_con span { + color: #283138; + overflow: hidden; + text-overflow: ellipsis; + display: -webkit-box; + -webkit-line-clamp: 2; + -webkit-box-orient: vertical; + line-height: 1.6; +} +.pic_check{ + position: absolute; + top: 10px; + left: 30px; +} +/*分页*/ +.fenye{ + width: 100%; + padding-bottom: 20px; + overflow: hidden; +} +.fenye .fenye_list{ + text-align: center; + overflow: hidden; + float: right; +} +.fenye_list a{ + color: #283138; +} +#fenye_list{ + overflow: hidden; + float: left; +} +.fenye .fenye_list ul li{ + float: left; + margin-right: 10px; + list-style-type: none; +} +.fensiprev,.fensinext{ + line-height: 30px; + display: block; + width: 60px; + height: 30px; + text-decoration: none; + cursor: pointer; +} +.fenye .fenye_list ul li .ye{ + display: block; + height: 30px; + width: 30px; + text-align: center; + line-height: 30px; + text-decoration: none; + cursor: pointer; +} +.fenye_list .page{ + display: block; + height: 30px; + float: left; + text-align: center; + line-height: 30px; + text-decoration: none; +} +.fenye_list #fenye_list li .index{ + background: #01b9d1; + color: white; +} +.fenye .fenye_list ul li .ye:hover{ + background: #01b9d1; + color: white; +} +/*分页*/ \ No newline at end of file diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/css/list.css 
b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/css/list.css new file mode 100644 index 0000000000000000000000000000000000000000..5b2a9fea5b1d0bec28afb6babfc77464b82bf51a --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/css/list.css @@ -0,0 +1,102 @@ +body{ + background-color: #edf7f9; + font-family: Arial, 黑体, Tahoma; +} + +.nav_library{ + width: 100%; + height: 80px; + background-color: #155070; + padding-left: 50px; + padding-right: 50px; + display: -webkit-box; + display: -webkit-flex; + display: flex; + -webkit-box-pack: justify; + -webkit-justify-content: space-between; + justify-content: space-between; + -webkit-box-align: center; + -webkit-align-items: center; + align-items: center; + box-sizing: border-box; +} +.nav_left{ + display: -webkit-box; + display: -webkit-flex; + display: flex; + -webkit-box-pack: justify; + -webkit-justify-content: space-between; + justify-content: space-between; + -webkit-box-align: center; + -webkit-align-items: center; + align-items: center; +} +.nav_left img{ + width: 60px; + height: 60px; + margin-right: 20px; +} +.nav_left p span{ + font-size: 24px; + font-weight: bold; + color: #ffffff; +} + +.container_top{ + height: 45px; + line-height: 45px; + font-weight: bold; + border-bottom: 1px solid #ccc; + margin: 0 auto; +} + +.box_content{ + /* width: 1024px; */ + margin: 0 auto; +} +.content_bot{ + /* background-color: #ffffff; */ + margin: 0 auto; +} +#mytable th,td{ + background-color:#ffffff; + height: 40px; + text-align: center; +} +.video_box{ + background-color: #edf7f9; +} +.video_fps{ + text-align: center; +} + +.video_fps p{ + height: 45px; + line-height: 45px; + font-weight: bold; + text-align: center; +} +.video_inner{ + height:400px; + display: -webkit-box; + display: -webkit-flex; + display: flex; + 
-webkit-box-pack: center; + -webkit-justify-content: center; + justify-content: center; + -webkit-box-align: center; + -webkit-align-items: center; + align-items: center; +} + +.video_loading{ + height:400px; + display: flex; + justify-content: center; + align-items: center; +} +#canvas{ + /* vertical-align:top; */ + max-width:100%; + max-height:400px; +} diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/css/testvideo.css b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/css/testvideo.css new file mode 100644 index 0000000000000000000000000000000000000000..52316c200a3ef7859fff4ee78e654bfce02d0f7c --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/css/testvideo.css @@ -0,0 +1,71 @@ +body{ + background-color: #edf7f9; + font-family: Arial, 宋体, Tahoma; +} + +.nav{ + width: 100%; + height: 80px; + background-color: #155070; + padding-left: 50px; + padding-right: 50px; + display: -webkit-box; + display: -webkit-flex; + display: flex; + -webkit-box-pack: justify; + -webkit-justify-content: space-between; + justify-content: space-between; + -webkit-box-align: center; + -webkit-align-items: center; + align-items: center; + box-sizing: border-box; +} +.nav_left{ + display: -webkit-box; + display: -webkit-flex; + display: flex; + -webkit-box-pack: justify; + -webkit-justify-content: space-between; + justify-content: space-between; + -webkit-box-align: center; + -webkit-align-items: center; + align-items: center; +} +.nav_left img{ + width: 60px; + height: 60px; + margin-right: 20px; +} +.nav_left p span{ + font-size: 24px; + font-weight: bold; + color: #ffffff; +} +.refresh{ + position: absolute; + left: 50%; + top: 50%; + margin: -6px 0 0 -6px ; + +} + +.video_top{ + width: 
1024px; + margin: 0 auto; + height: 50px; + line-height: 50px; + font-weight: bold; + +} +.video_content{ + width: 1024px; + margin: 0 auto; +} +.video_fps{ + text-align: right; + padding-right: 100px; +} +.video_inner{ + text-align: center; + position: relative; +} diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/loading.gif b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/loading.gif new file mode 100644 index 0000000000000000000000000000000000000000..227509e2d48a816161c742f2e36d0c04bd1398fc Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/loading.gif differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/logo.png b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/logo.png new file mode 100644 index 0000000000000000000000000000000000000000..f5bb6da89222cda01384f7ce1221330b92ebdd40 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/logo.png differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/u1.png b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/u1.png new file mode 100644 index 0000000000000000000000000000000000000000..61c2e2e64ac085f26a1b99d1e81c8b319cbda104 Binary files /dev/null and 
b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/u1.png differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/u2.png b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/u2.png new file mode 100644 index 0000000000000000000000000000000000000000..b2a67d430ad943058e1bef137b9c654185ed8df0 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/u2.png differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/u3.png b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/u3.png new file mode 100644 index 0000000000000000000000000000000000000000..927cded1f0359366e18549a48bd8ea9304d1daad Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/u3.png differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/u4.png b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/u4.png new file mode 100644 index 0000000000000000000000000000000000000000..1e7b37fe5f63bab3010aa82a53440c5c9d2aab95 Binary files /dev/null and 
b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/u4.png differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/u5.png b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/u5.png new file mode 100644 index 0000000000000000000000000000000000000000..9dc805f115fdcb63db312af47aff9f3f1a585689 Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/u5.png differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/u6.png b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/u6.png new file mode 100644 index 0000000000000000000000000000000000000000..8e57d8f7f8e92cfa6fc117dd7cad9455a6bf2cea Binary files /dev/null and b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/images/u6.png differ diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/js/dialog.js b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/js/dialog.js new file mode 100644 index 0000000000000000000000000000000000000000..b831ca6585a0ba4afbdc029974b5d5122e43fa5f --- /dev/null +++ 
b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/js/dialog.js @@ -0,0 +1,97 @@ +var dialog = (function Dialog() { + var $mask = $("
"); + + //title-- + //content--object{type[1 for input, 0 for text], text[display in content]} + //btnFlag--1 for OK+Cancel, 0 for OK + function initHtml(title, content, btnFlag, ok, cancel) { + var div = ""; + div += "
"; + div += "

" + title + "

" + div += "
"; + if (content.type == 0) { + div += "

" + content.text + "

" + } else if (content.type == 1) { + div += ""; + } + if (btnFlag == 0) { + div += "
OK
"; + } else if (btnFlag == 1) { + div += "
OKCancel
"; + } + div += "
"; + $mask.html(div); + $mask.find("input").val($mask.find("input").val()); + if (content.type == 1) { + $mask.find(".ok").on("click", function() { + var retText = $mask.find("input").val(); + hide(); + if (ok) { + ok(retText); + } + }); + $mask.find(".cancel").on("click", function() { + var retText = $mask.find("input").val(); + hide(); + if (cancel) { + cancel(retText); + } + }); + } else { + $mask.find(".ok").on("click", function() { + hide(); + if (ok) { + ok(); + } + }); + $mask.find(".cancel").on("click", function() { + hide(); + if (cancel) { + cancel(); + } + }); + } + } + + function calcBoxPos() { + var $box = $mask.find("#dlg-box"); + var mask_w = $mask.outerWidth(); + var mask_h = $mask.outerHeight(); + var box_w = $box.outerWidth(); + var box_h = $box.outerHeight(); + var pos_left = (mask_w - box_w) / 2 + "px"; + var pos_top = (mask_h - box_h) / 2 + "px"; + $box.css("left", pos_left).css("top", pos_top); + } + + function show() { + $("body").prepend($mask); + $mask.css("display", "block"); + if ($mask.find("input") && $mask.find("input")[0]) { + $mask.find("input")[0].focus(); + } + calcBoxPos(); + $(window).resize(calcBoxPos); + } + + function hide() { + $mask.css("display", "none"); + $mask.remove(); + } + + return { + hide: hide, + tip: function(title, text, ok) { + initHtml(title, { type: 0, text: text }, 0, ok, null); + show(); + }, + input: function(title, text, placeholder, ok, cancel) { + initHtml(title, { type: 1, text: text, placeholder: placeholder }, 1, ok, cancel); + show(); + }, + confirm: function(title, text, ok, cancel) { + initHtml(title, { type: 0, text: text }, 1, ok, cancel); + show(); + } + } +})(); \ No newline at end of file diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/js/dialog.min.js 
b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/js/dialog.min.js new file mode 100644 index 0000000000000000000000000000000000000000..b2c094c70ca29c09baa4040dec5e2a7e4208b846 --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/js/dialog.min.js @@ -0,0 +1 @@ +var dialog = (function Dialog() { var e = $("
"); function d(j, h, i, f, g) { var k = ""; k += "
"; k += "

" + j + "

"; if (h.type == 0) { k += "

" + h.text + "

" } else { if (h.type == 1) { k += "" } } if (i == 0) { k += "
OK
" } else { if (i == 1) { k += "
OKCancel
" } } k += "
"; e.html(k); e.find("input").val(e.find("input").val()); if (h.type == 1) { e.find(".ok").on("click", function() { var l = e.find("input").val(); b(); if (f) { f(l) } }); e.find(".cancel").on("click", function() { var l = e.find("input").val(); b(); if (g) { g(l) } }) } else { e.find(".ok").on("click", function() { b(); if (f) { f() } }); e.find(".cancel").on("click", function() { b(); if (g) { g() } }) } } function c() { var k = e.find("#dlg-box"); var h = e.outerWidth(); var j = e.outerHeight(); var l = k.outerWidth(); var g = k.outerHeight(); var f = (h - l) / 2 + "px"; var i = (j - g) / 2 + "px"; k.css("left", f).css("top", i) } function a() { $("body").prepend(e); e.css("display", "block"); if (e.find("input") && e.find("input")[0]) { e.find("input")[0].focus() } c(); $(window).resize(c) } function b() { e.css("display", "none"); e.remove() } return { hide: b, tip: function(h, g, f) { d(h, { type: 0, text: g }, 0, f, null); a() }, input: function(j, i, h, f, g) { d(j, { type: 1, text: i, placeholder: h }, 1, f, g); a() }, confirm: function(i, h, f, g) { d(i, { type: 0, text: h }, 1, f, g); a() } } })(); \ No newline at end of file diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/js/index.js b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/js/index.js new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/js/jquery-1.10.2.min.js b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/js/jquery-1.10.2.min.js new file mode 100644 index 
0000000000000000000000000000000000000000..8569bc49f4de4cb9e6cb72b77bc245bb1fcd34fe --- /dev/null +++ b/cplusplus/level2_simple_inference/n_performance/1_multi_process_thread/face_recognition_camera/presenterserver/facial_recognition/ui/static/js/jquery-1.10.2.min.js @@ -0,0 +1,4 @@ +/*! jQuery v1.10.2 | (c) 2005, 2013 jQuery Foundation, Inc. | jquery.org/license*/ +(function(e,t){var n,r,i=typeof t,o=e.location,a=e.document,s=a.documentElement,l=e.jQuery,u=e.$,c={},p=[],f="1.10.2",d=p.concat,h=p.push,g=p.slice,m=p.indexOf,y=c.toString,v=c.hasOwnProperty,b=f.trim,x=function(e,t){return new x.fn.init(e,t,r)},w=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,T=/\S+/g,C=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,N=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,k=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,E=/^[\],:{}\s]*$/,S=/(?:^|:|,)(?:\s*\[)+/g,A=/\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g,j=/"[^"\\\r\n]*"|true|false|null|-?(?:\d+\.|)\d+(?:[eE][+-]?\d+|)/g,D=/^-ms-/,L=/-([\da-z])/gi,H=function(e,t){return t.toUpperCase()},q=function(e){(a.addEventListener||"load"===e.type||"complete"===a.readyState)&&(_(),x.ready())},_=function(){a.addEventListener?(a.removeEventListener("DOMContentLoaded",q,!1),e.removeEventListener("load",q,!1)):(a.detachEvent("onreadystatechange",q),e.detachEvent("onload",q))};x.fn=x.prototype={jquery:f,constructor:x,init:function(e,n,r){var i,o;if(!e)return this;if("string"==typeof e){if(i="<"===e.charAt(0)&&">"===e.charAt(e.length-1)&&e.length>=3?[null,e,null]:N.exec(e),!i||!i[1]&&n)return!n||n.jquery?(n||r).find(e):this.constructor(n).find(e);if(i[1]){if(n=n instanceof x?n[0]:n,x.merge(this,x.parseHTML(i[1],n&&n.nodeType?n.ownerDocument||n:a,!0)),k.test(i[1])&&x.isPlainObject(n))for(i in n)x.isFunction(this[i])?this[i](n[i]):this.attr(i,n[i]);return this}if(o=a.getElementById(i[2]),o&&o.parentNode){if(o.id!==i[2])return r.find(e);this.length=1,this[0]=o}return this.context=a,this.selector=e,this}return 
e.nodeType?(this.context=this[0]=e,this.length=1,this):x.isFunction(e)?r.ready(e):(e.selector!==t&&(this.selector=e.selector,this.context=e.context),x.makeArray(e,this))},selector:"",length:0,toArray:function(){return g.call(this)},get:function(e){return null==e?this.toArray():0>e?this[this.length+e]:this[e]},pushStack:function(e){var t=x.merge(this.constructor(),e);return t.prevObject=this,t.context=this.context,t},each:function(e,t){return x.each(this,e,t)},ready:function(e){return x.ready.promise().done(e),this},slice:function(){return this.pushStack(g.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(e){var t=this.length,n=+e+(0>e?t:0);return this.pushStack(n>=0&&t>n?[this[n]]:[])},map:function(e){return this.pushStack(x.map(this,function(t,n){return e.call(t,n,t)}))},end:function(){return this.prevObject||this.constructor(null)},push:h,sort:[].sort,splice:[].splice},x.fn.init.prototype=x.fn,x.extend=x.fn.extend=function(){var e,n,r,i,o,a,s=arguments[0]||{},l=1,u=arguments.length,c=!1;for("boolean"==typeof s&&(c=s,s=arguments[1]||{},l=2),"object"==typeof s||x.isFunction(s)||(s={}),u===l&&(s=this,--l);u>l;l++)if(null!=(o=arguments[l]))for(i in o)e=s[i],r=o[i],s!==r&&(c&&r&&(x.isPlainObject(r)||(n=x.isArray(r)))?(n?(n=!1,a=e&&x.isArray(e)?e:[]):a=e&&x.isPlainObject(e)?e:{},s[i]=x.extend(c,a,r)):r!==t&&(s[i]=r));return s},x.extend({expando:"jQuery"+(f+Math.random()).replace(/\D/g,""),noConflict:function(t){return e.$===x&&(e.$=u),t&&e.jQuery===x&&(e.jQuery=l),x},isReady:!1,readyWait:1,holdReady:function(e){e?x.readyWait++:x.ready(!0)},ready:function(e){if(e===!0?!--x.readyWait:!x.isReady){if(!a.body)return setTimeout(x.ready);x.isReady=!0,e!==!0&&--x.readyWait>0||(n.resolveWith(a,[x]),x.fn.trigger&&x(a).trigger("ready").off("ready"))}},isFunction:function(e){return"function"===x.type(e)},isArray:Array.isArray||function(e){return"array"===x.type(e)},isWindow:function(e){return 
null!=e&&e==e.window},isNumeric:function(e){return!isNaN(parseFloat(e))&&isFinite(e)},type:function(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?c[y.call(e)]||"object":typeof e},isPlainObject:function(e){var n;if(!e||"object"!==x.type(e)||e.nodeType||x.isWindow(e))return!1;try{if(e.constructor&&!v.call(e,"constructor")&&!v.call(e.constructor.prototype,"isPrototypeOf"))return!1}catch(r){return!1}if(x.support.ownLast)for(n in e)return v.call(e,n);for(n in e);return n===t||v.call(e,n)},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},error:function(e){throw Error(e)},parseHTML:function(e,t,n){if(!e||"string"!=typeof e)return null;"boolean"==typeof t&&(n=t,t=!1),t=t||a;var r=k.exec(e),i=!n&&[];return r?[t.createElement(r[1])]:(r=x.buildFragment([e],t,i),i&&x(i).remove(),x.merge([],r.childNodes))},parseJSON:function(n){return e.JSON&&e.JSON.parse?e.JSON.parse(n):null===n?n:"string"==typeof n&&(n=x.trim(n),n&&E.test(n.replace(A,"@").replace(j,"]").replace(S,"")))?Function("return "+n)():(x.error("Invalid JSON: "+n),t)},parseXML:function(n){var r,i;if(!n||"string"!=typeof n)return null;try{e.DOMParser?(i=new DOMParser,r=i.parseFromString(n,"text/xml")):(r=new ActiveXObject("Microsoft.XMLDOM"),r.async="false",r.loadXML(n))}catch(o){r=t}return r&&r.documentElement&&!r.getElementsByTagName("parsererror").length||x.error("Invalid XML: "+n),r},noop:function(){},globalEval:function(t){t&&x.trim(t)&&(e.execScript||function(t){e.eval.call(e,t)})(t)},camelCase:function(e){return e.replace(D,"ms-").replace(L,H)},nodeName:function(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()},each:function(e,t,n){var r,i=0,o=e.length,a=M(e);if(n){if(a){for(;o>i;i++)if(r=t.apply(e[i],n),r===!1)break}else for(i in e)if(r=t.apply(e[i],n),r===!1)break}else if(a){for(;o>i;i++)if(r=t.call(e[i],i,e[i]),r===!1)break}else for(i in e)if(r=t.call(e[i],i,e[i]),r===!1)break;return e},trim:b&&!b.call("\ufeff\u00a0")?function(e){return 
null==e?"":b.call(e)}:function(e){return null==e?"":(e+"").replace(C,"")},makeArray:function(e,t){var n=t||[];return null!=e&&(M(Object(e))?x.merge(n,"string"==typeof e?[e]:e):h.call(n,e)),n},inArray:function(e,t,n){var r;if(t){if(m)return m.call(t,e,n);for(r=t.length,n=n?0>n?Math.max(0,r+n):n:0;r>n;n++)if(n in t&&t[n]===e)return n}return-1},merge:function(e,n){var r=n.length,i=e.length,o=0;if("number"==typeof r)for(;r>o;o++)e[i++]=n[o];else while(n[o]!==t)e[i++]=n[o++];return e.length=i,e},grep:function(e,t,n){var r,i=[],o=0,a=e.length;for(n=!!n;a>o;o++)r=!!t(e[o],o),n!==r&&i.push(e[o]);return i},map:function(e,t,n){var r,i=0,o=e.length,a=M(e),s=[];if(a)for(;o>i;i++)r=t(e[i],i,n),null!=r&&(s[s.length]=r);else for(i in e)r=t(e[i],i,n),null!=r&&(s[s.length]=r);return d.apply([],s)},guid:1,proxy:function(e,n){var r,i,o;return"string"==typeof n&&(o=e[n],n=e,e=o),x.isFunction(e)?(r=g.call(arguments,2),i=function(){return e.apply(n||this,r.concat(g.call(arguments)))},i.guid=e.guid=e.guid||x.guid++,i):t},access:function(e,n,r,i,o,a,s){var l=0,u=e.length,c=null==r;if("object"===x.type(r)){o=!0;for(l in r)x.access(e,n,l,r[l],!0,a,s)}else if(i!==t&&(o=!0,x.isFunction(i)||(s=!0),c&&(s?(n.call(e,i),n=null):(c=n,n=function(e,t,n){return c.call(x(e),n)})),n))for(;u>l;l++)n(e[l],r,s?i:i.call(e[l],l,n(e[l],r)));return o?e:c?n.call(e):u?n(e[0],r):a},now:function(){return(new Date).getTime()},swap:function(e,t,n,r){var i,o,a={};for(o in t)a[o]=e.style[o],e.style[o]=t[o];i=n.apply(e,r||[]);for(o in t)e.style[o]=a[o];return i}}),x.ready.promise=function(t){if(!n)if(n=x.Deferred(),"complete"===a.readyState)setTimeout(x.ready);else if(a.addEventListener)a.addEventListener("DOMContentLoaded",q,!1),e.addEventListener("load",q,!1);else{a.attachEvent("onreadystatechange",q),e.attachEvent("onload",q);var r=!1;try{r=null==e.frameElement&&a.documentElement}catch(i){}r&&r.doScroll&&function o(){if(!x.isReady){try{r.doScroll("left")}catch(e){return setTimeout(o,50)}_(),x.ready()}}()}return 
n.promise(t)},x.each("Boolean Number String Function Array Date RegExp Object Error".split(" "),function(e,t){c["[object "+t+"]"]=t.toLowerCase()});function M(e){var t=e.length,n=x.type(e);return x.isWindow(e)?!1:1===e.nodeType&&t?!0:"array"===n||"function"!==n&&(0===t||"number"==typeof t&&t>0&&t-1 in e)}r=x(a),function(e,t){var n,r,i,o,a,s,l,u,c,p,f,d,h,g,m,y,v,b="sizzle"+-new Date,w=e.document,T=0,C=0,N=st(),k=st(),E=st(),S=!1,A=function(e,t){return e===t?(S=!0,0):0},j=typeof t,D=1<<31,L={}.hasOwnProperty,H=[],q=H.pop,_=H.push,M=H.push,O=H.slice,F=H.indexOf||function(e){var t=0,n=this.length;for(;n>t;t++)if(this[t]===e)return t;return-1},B="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",P="[\\x20\\t\\r\\n\\f]",R="(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",W=R.replace("w","w#"),$="\\["+P+"*("+R+")"+P+"*(?:([*^$|!~]?=)"+P+"*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|("+W+")|)|)"+P+"*\\]",I=":("+R+")(?:\\(((['\"])((?:\\\\.|[^\\\\])*?)\\3|((?:\\\\.|[^\\\\()[\\]]|"+$.replace(3,8)+")*)|.*)\\)|)",z=RegExp("^"+P+"+|((?:^|[^\\\\])(?:\\\\.)*)"+P+"+$","g"),X=RegExp("^"+P+"*,"+P+"*"),U=RegExp("^"+P+"*([>+~]|"+P+")"+P+"*"),V=RegExp(P+"*[+~]"),Y=RegExp("="+P+"*([^\\]'\"]*)"+P+"*\\]","g"),J=RegExp(I),G=RegExp("^"+W+"$"),Q={ID:RegExp("^#("+R+")"),CLASS:RegExp("^\\.("+R+")"),TAG:RegExp("^("+R.replace("w","w*")+")"),ATTR:RegExp("^"+$),PSEUDO:RegExp("^"+I),CHILD:RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+P+"*(even|odd|(([+-]|)(\\d*)n|)"+P+"*(?:([+-]|)"+P+"*(\\d+)|))"+P+"*\\)|)","i"),bool:RegExp("^(?:"+B+")$","i"),needsContext:RegExp("^"+P+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+P+"*((?:-\\d)?\\d*)"+P+"*\\)|)(?=[^-]|$)","i")},K=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,et=/^(?:input|select|textarea|button)$/i,tt=/^h\d$/i,nt=/'|\\/g,rt=RegExp("\\\\([\\da-f]{1,6}"+P+"?|("+P+")|.)","ig"),it=function(e,t,n){var r="0x"+t-65536;return 
r!==r||n?t:0>r?String.fromCharCode(r+65536):String.fromCharCode(55296|r>>10,56320|1023&r)};try{M.apply(H=O.call(w.childNodes),w.childNodes),H[w.childNodes.length].nodeType}catch(ot){M={apply:H.length?function(e,t){_.apply(e,O.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function at(e,t,n,i){var o,a,s,l,u,c,d,m,y,x;if((t?t.ownerDocument||t:w)!==f&&p(t),t=t||f,n=n||[],!e||"string"!=typeof e)return n;if(1!==(l=t.nodeType)&&9!==l)return[];if(h&&!i){if(o=Z.exec(e))if(s=o[1]){if(9===l){if(a=t.getElementById(s),!a||!a.parentNode)return n;if(a.id===s)return n.push(a),n}else if(t.ownerDocument&&(a=t.ownerDocument.getElementById(s))&&v(t,a)&&a.id===s)return n.push(a),n}else{if(o[2])return M.apply(n,t.getElementsByTagName(e)),n;if((s=o[3])&&r.getElementsByClassName&&t.getElementsByClassName)return M.apply(n,t.getElementsByClassName(s)),n}if(r.qsa&&(!g||!g.test(e))){if(m=d=b,y=t,x=9===l&&e,1===l&&"object"!==t.nodeName.toLowerCase()){c=mt(e),(d=t.getAttribute("id"))?m=d.replace(nt,"\\$&"):t.setAttribute("id",m),m="[id='"+m+"'] ",u=c.length;while(u--)c[u]=m+yt(c[u]);y=V.test(e)&&t.parentNode||t,x=c.join(",")}if(x)try{return M.apply(n,y.querySelectorAll(x)),n}catch(T){}finally{d||t.removeAttribute("id")}}}return kt(e.replace(z,"$1"),t,n,i)}function st(){var e=[];function t(n,r){return e.push(n+=" ")>o.cacheLength&&delete t[e.shift()],t[n]=r}return t}function lt(e){return e[b]=!0,e}function ut(e){var t=f.createElement("div");try{return!!e(t)}catch(n){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function ct(e,t){var n=e.split("|"),r=e.length;while(r--)o.attrHandle[n[r]]=t}function pt(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&(~t.sourceIndex||D)-(~e.sourceIndex||D);if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function ft(e){return function(t){var n=t.nodeName.toLowerCase();return"input"===n&&t.type===e}}function dt(e){return function(t){var 
n=t.nodeName.toLowerCase();return("input"===n||"button"===n)&&t.type===e}}function ht(e){return lt(function(t){return t=+t,lt(function(n,r){var i,o=e([],n.length,t),a=o.length;while(a--)n[i=o[a]]&&(n[i]=!(r[i]=n[i]))})})}s=at.isXML=function(e){var t=e&&(e.ownerDocument||e).documentElement;return t?"HTML"!==t.nodeName:!1},r=at.support={},p=at.setDocument=function(e){var n=e?e.ownerDocument||e:w,i=n.defaultView;return n!==f&&9===n.nodeType&&n.documentElement?(f=n,d=n.documentElement,h=!s(n),i&&i.attachEvent&&i!==i.top&&i.attachEvent("onbeforeunload",function(){p()}),r.attributes=ut(function(e){return e.className="i",!e.getAttribute("className")}),r.getElementsByTagName=ut(function(e){return e.appendChild(n.createComment("")),!e.getElementsByTagName("*").length}),r.getElementsByClassName=ut(function(e){return e.innerHTML="
",e.firstChild.className="i",2===e.getElementsByClassName("i").length}),r.getById=ut(function(e){return d.appendChild(e).id=b,!n.getElementsByName||!n.getElementsByName(b).length}),r.getById?(o.find.ID=function(e,t){if(typeof t.getElementById!==j&&h){var n=t.getElementById(e);return n&&n.parentNode?[n]:[]}},o.filter.ID=function(e){var t=e.replace(rt,it);return function(e){return e.getAttribute("id")===t}}):(delete o.find.ID,o.filter.ID=function(e){var t=e.replace(rt,it);return function(e){var n=typeof e.getAttributeNode!==j&&e.getAttributeNode("id");return n&&n.value===t}}),o.find.TAG=r.getElementsByTagName?function(e,n){return typeof n.getElementsByTagName!==j?n.getElementsByTagName(e):t}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},o.find.CLASS=r.getElementsByClassName&&function(e,n){return typeof n.getElementsByClassName!==j&&h?n.getElementsByClassName(e):t},m=[],g=[],(r.qsa=K.test(n.querySelectorAll))&&(ut(function(e){e.innerHTML="",e.querySelectorAll("[selected]").length||g.push("\\["+P+"*(?:value|"+B+")"),e.querySelectorAll(":checked").length||g.push(":checked")}),ut(function(e){var t=n.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("t",""),e.querySelectorAll("[t^='']").length&&g.push("[*^$]="+P+"*(?:''|\"\")"),e.querySelectorAll(":enabled").length||g.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),g.push(",.*:")})),(r.matchesSelector=K.test(y=d.webkitMatchesSelector||d.mozMatchesSelector||d.oMatchesSelector||d.msMatchesSelector))&&ut(function(e){r.disconnectedMatch=y.call(e,"div"),y.call(e,"[s!='']:x"),m.push("!=",I)}),g=g.length&&RegExp(g.join("|")),m=m.length&&RegExp(m.join("|")),v=K.test(d.contains)||d.compareDocumentPosition?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return 
e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},A=d.compareDocumentPosition?function(e,t){if(e===t)return S=!0,0;var i=t.compareDocumentPosition&&e.compareDocumentPosition&&e.compareDocumentPosition(t);return i?1&i||!r.sortDetached&&t.compareDocumentPosition(e)===i?e===n||v(w,e)?-1:t===n||v(w,t)?1:c?F.call(c,e)-F.call(c,t):0:4&i?-1:1:e.compareDocumentPosition?-1:1}:function(e,t){var r,i=0,o=e.parentNode,a=t.parentNode,s=[e],l=[t];if(e===t)return S=!0,0;if(!o||!a)return e===n?-1:t===n?1:o?-1:a?1:c?F.call(c,e)-F.call(c,t):0;if(o===a)return pt(e,t);r=e;while(r=r.parentNode)s.unshift(r);r=t;while(r=r.parentNode)l.unshift(r);while(s[i]===l[i])i++;return i?pt(s[i],l[i]):s[i]===w?-1:l[i]===w?1:0},n):f},at.matches=function(e,t){return at(e,null,null,t)},at.matchesSelector=function(e,t){if((e.ownerDocument||e)!==f&&p(e),t=t.replace(Y,"='$1']"),!(!r.matchesSelector||!h||m&&m.test(t)||g&&g.test(t)))try{var n=y.call(e,t);if(n||r.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(i){}return at(t,f,null,[e]).length>0},at.contains=function(e,t){return(e.ownerDocument||e)!==f&&p(e),v(e,t)},at.attr=function(e,n){(e.ownerDocument||e)!==f&&p(e);var i=o.attrHandle[n.toLowerCase()],a=i&&L.call(o.attrHandle,n.toLowerCase())?i(e,n,!h):t;return a===t?r.attributes||!h?e.getAttribute(n):(a=e.getAttributeNode(n))&&a.specified?a.value:null:a},at.error=function(e){throw Error("Syntax error, unrecognized expression: "+e)},at.uniqueSort=function(e){var t,n=[],i=0,o=0;if(S=!r.detectDuplicates,c=!r.sortStable&&e.slice(0),e.sort(A),S){while(t=e[o++])t===e[o]&&(i=n.push(o));while(i--)e.splice(n[i],1)}return e},a=at.getText=function(e){var t,n="",r=0,i=e.nodeType;if(i){if(1===i||9===i||11===i){if("string"==typeof e.textContent)return e.textContent;for(e=e.firstChild;e;e=e.nextSibling)n+=a(e)}else if(3===i||4===i)return e.nodeValue}else 
for(;t=e[r];r++)n+=a(t);return n},o=at.selectors={cacheLength:50,createPseudo:lt,match:Q,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(rt,it),e[3]=(e[4]||e[5]||"").replace(rt,it),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||at.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&at.error(e[0]),e},PSEUDO:function(e){var n,r=!e[5]&&e[2];return Q.CHILD.test(e[0])?null:(e[3]&&e[4]!==t?e[2]=e[4]:r&&J.test(r)&&(n=mt(r,!0))&&(n=r.indexOf(")",r.length-n)-r.length)&&(e[0]=e[0].slice(0,n),e[2]=r.slice(0,n)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(rt,it).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=N[e+" "];return t||(t=RegExp("(^|"+P+")"+e+"("+P+"|$)"))&&N(e,function(e){return t.test("string"==typeof e.className&&e.className||typeof e.getAttribute!==j&&e.getAttribute("class")||"")})},ATTR:function(e,t,n){return function(r){var i=at.attr(r,e);return null==i?"!="===t:t?(i+="","="===t?i===n:"!="===t?i!==n:"^="===t?n&&0===i.indexOf(n):"*="===t?n&&i.indexOf(n)>-1:"$="===t?n&&i.slice(-n.length)===n:"~="===t?(" "+i+" ").indexOf(n)>-1:"|="===t?i===n||i.slice(0,n.length+1)===n+"-":!1):!0}},CHILD:function(e,t,n,r,i){var o="nth"!==e.slice(0,3),a="last"!==e.slice(-4),s="of-type"===t;return 1===r&&0===i?function(e){return!!e.parentNode}:function(t,n,l){var 
u,c,p,f,d,h,g=o!==a?"nextSibling":"previousSibling",m=t.parentNode,y=s&&t.nodeName.toLowerCase(),v=!l&&!s;if(m){if(o){while(g){p=t;while(p=p[g])if(s?p.nodeName.toLowerCase()===y:1===p.nodeType)return!1;h=g="only"===e&&!h&&"nextSibling"}return!0}if(h=[a?m.firstChild:m.lastChild],a&&v){c=m[b]||(m[b]={}),u=c[e]||[],d=u[0]===T&&u[1],f=u[0]===T&&u[2],p=d&&m.childNodes[d];while(p=++d&&p&&p[g]||(f=d=0)||h.pop())if(1===p.nodeType&&++f&&p===t){c[e]=[T,d,f];break}}else if(v&&(u=(t[b]||(t[b]={}))[e])&&u[0]===T)f=u[1];else while(p=++d&&p&&p[g]||(f=d=0)||h.pop())if((s?p.nodeName.toLowerCase()===y:1===p.nodeType)&&++f&&(v&&((p[b]||(p[b]={}))[e]=[T,f]),p===t))break;return f-=i,f===r||0===f%r&&f/r>=0}}},PSEUDO:function(e,t){var n,r=o.pseudos[e]||o.setFilters[e.toLowerCase()]||at.error("unsupported pseudo: "+e);return r[b]?r(t):r.length>1?(n=[e,e,"",t],o.setFilters.hasOwnProperty(e.toLowerCase())?lt(function(e,n){var i,o=r(e,t),a=o.length;while(a--)i=F.call(e,o[a]),e[i]=!(n[i]=o[a])}):function(e){return r(e,0,n)}):r}},pseudos:{not:lt(function(e){var t=[],n=[],r=l(e.replace(z,"$1"));return r[b]?lt(function(e,t,n,i){var o,a=r(e,null,i,[]),s=e.length;while(s--)(o=a[s])&&(e[s]=!(t[s]=o))}):function(e,i,o){return t[0]=e,r(t,null,o,n),!n.pop()}}),has:lt(function(e){return function(t){return at(e,t).length>0}}),contains:lt(function(e){return function(t){return(t.textContent||t.innerText||a(t)).indexOf(e)>-1}}),lang:lt(function(e){return G.test(e||"")||at.error("unsupported lang: "+e),e=e.replace(rt,it).toLowerCase(),function(t){var n;do if(n=h?t.lang:t.getAttribute("xml:lang")||t.getAttribute("lang"))return n=n.toLowerCase(),n===e||0===n.indexOf(e+"-");while((t=t.parentNode)&&1===t.nodeType);return!1}}),target:function(t){var n=e.location&&e.location.hash;return n&&n.slice(1)===t.id},root:function(e){return e===d},focus:function(e){return e===f.activeElement&&(!f.hasFocus||f.hasFocus())&&!!(e.type||e.href||~e.tabIndex)},enabled:function(e){return 
e.disabled===!1},disabled:function(e){return e.disabled===!0},checked:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&!!e.checked||"option"===t&&!!e.selected},selected:function(e){return e.parentNode&&e.parentNode.selectedIndex,e.selected===!0},empty:function(e){for(e=e.firstChild;e;e=e.nextSibling)if(e.nodeName>"@"||3===e.nodeType||4===e.nodeType)return!1;return!0},parent:function(e){return!o.pseudos.empty(e)},header:function(e){return tt.test(e.nodeName)},input:function(e){return et.test(e.nodeName)},button:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&"button"===e.type||"button"===t},text:function(e){var t;return"input"===e.nodeName.toLowerCase()&&"text"===e.type&&(null==(t=e.getAttribute("type"))||t.toLowerCase()===e.type)},first:ht(function(){return[0]}),last:ht(function(e,t){return[t-1]}),eq:ht(function(e,t,n){return[0>n?n+t:n]}),even:ht(function(e,t){var n=0;for(;t>n;n+=2)e.push(n);return e}),odd:ht(function(e,t){var n=1;for(;t>n;n+=2)e.push(n);return e}),lt:ht(function(e,t,n){var r=0>n?n+t:n;for(;--r>=0;)e.push(r);return e}),gt:ht(function(e,t,n){var r=0>n?n+t:n;for(;t>++r;)e.push(r);return e})}},o.pseudos.nth=o.pseudos.eq;for(n in{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})o.pseudos[n]=ft(n);for(n in{submit:!0,reset:!0})o.pseudos[n]=dt(n);function gt(){}gt.prototype=o.filters=o.pseudos,o.setFilters=new gt;function mt(e,t){var n,r,i,a,s,l,u,c=k[e+" "];if(c)return t?0:c.slice(0);s=e,l=[],u=o.preFilter;while(s){(!n||(r=X.exec(s)))&&(r&&(s=s.slice(r[0].length)||s),l.push(i=[])),n=!1,(r=U.exec(s))&&(n=r.shift(),i.push({value:n,type:r[0].replace(z," ")}),s=s.slice(n.length));for(a in o.filter)!(r=Q[a].exec(s))||u[a]&&!(r=u[a](r))||(n=r.shift(),i.push({value:n,type:a,matches:r}),s=s.slice(n.length));if(!n)break}return t?s.length:s?at.error(e):k(e,l).slice(0)}function yt(e){var t=0,n=e.length,r="";for(;n>t;t++)r+=e[t].value;return r}function vt(e,t,n){var r=t.dir,o=n&&"parentNode"===r,a=C++;return 
t.first?function(t,n,i){while(t=t[r])if(1===t.nodeType||o)return e(t,n,i)}:function(t,n,s){var l,u,c,p=T+" "+a;if(s){while(t=t[r])if((1===t.nodeType||o)&&e(t,n,s))return!0}else while(t=t[r])if(1===t.nodeType||o)if(c=t[b]||(t[b]={}),(u=c[r])&&u[0]===p){if((l=u[1])===!0||l===i)return l===!0}else if(u=c[r]=[p],u[1]=e(t,n,s)||i,u[1]===!0)return!0}}function bt(e){return e.length>1?function(t,n,r){var i=e.length;while(i--)if(!e[i](t,n,r))return!1;return!0}:e[0]}function xt(e,t,n,r,i){var o,a=[],s=0,l=e.length,u=null!=t;for(;l>s;s++)(o=e[s])&&(!n||n(o,r,i))&&(a.push(o),u&&t.push(s));return a}function wt(e,t,n,r,i,o){return r&&!r[b]&&(r=wt(r)),i&&!i[b]&&(i=wt(i,o)),lt(function(o,a,s,l){var u,c,p,f=[],d=[],h=a.length,g=o||Nt(t||"*",s.nodeType?[s]:s,[]),m=!e||!o&&t?g:xt(g,f,e,s,l),y=n?i||(o?e:h||r)?[]:a:m;if(n&&n(m,y,s,l),r){u=xt(y,d),r(u,[],s,l),c=u.length;while(c--)(p=u[c])&&(y[d[c]]=!(m[d[c]]=p))}if(o){if(i||e){if(i){u=[],c=y.length;while(c--)(p=y[c])&&u.push(m[c]=p);i(null,y=[],u,l)}c=y.length;while(c--)(p=y[c])&&(u=i?F.call(o,p):f[c])>-1&&(o[u]=!(a[u]=p))}}else y=xt(y===a?y.splice(h,y.length):y),i?i(null,a,y,l):M.apply(a,y)})}function Tt(e){var t,n,r,i=e.length,a=o.relative[e[0].type],s=a||o.relative[" "],l=a?1:0,c=vt(function(e){return e===t},s,!0),p=vt(function(e){return F.call(t,e)>-1},s,!0),f=[function(e,n,r){return!a&&(r||n!==u)||((t=n).nodeType?c(e,n,r):p(e,n,r))}];for(;i>l;l++)if(n=o.relative[e[l].type])f=[vt(bt(f),n)];else{if(n=o.filter[e[l].type].apply(null,e[l].matches),n[b]){for(r=++l;i>r;r++)if(o.relative[e[r].type])break;return wt(l>1&&bt(f),l>1&&yt(e.slice(0,l-1).concat({value:" "===e[l-2].type?"*":""})).replace(z,"$1"),n,r>l&&Tt(e.slice(l,r)),i>r&&Tt(e=e.slice(r)),i>r&&yt(e))}f.push(n)}return bt(f)}function Ct(e,t){var n=0,r=t.length>0,a=e.length>0,s=function(s,l,c,p,d){var 
h,g,m,y=[],v=0,b="0",x=s&&[],w=null!=d,C=u,N=s||a&&o.find.TAG("*",d&&l.parentNode||l),k=T+=null==C?1:Math.random()||.1;for(w&&(u=l!==f&&l,i=n);null!=(h=N[b]);b++){if(a&&h){g=0;while(m=e[g++])if(m(h,l,c)){p.push(h);break}w&&(T=k,i=++n)}r&&((h=!m&&h)&&v--,s&&x.push(h))}if(v+=b,r&&b!==v){g=0;while(m=t[g++])m(x,y,l,c);if(s){if(v>0)while(b--)x[b]||y[b]||(y[b]=q.call(p));y=xt(y)}M.apply(p,y),w&&!s&&y.length>0&&v+t.length>1&&at.uniqueSort(p)}return w&&(T=k,u=C),x};return r?lt(s):s}l=at.compile=function(e,t){var n,r=[],i=[],o=E[e+" "];if(!o){t||(t=mt(e)),n=t.length;while(n--)o=Tt(t[n]),o[b]?r.push(o):i.push(o);o=E(e,Ct(i,r))}return o};function Nt(e,t,n){var r=0,i=t.length;for(;i>r;r++)at(e,t[r],n);return n}function kt(e,t,n,i){var a,s,u,c,p,f=mt(e);if(!i&&1===f.length){if(s=f[0]=f[0].slice(0),s.length>2&&"ID"===(u=s[0]).type&&r.getById&&9===t.nodeType&&h&&o.relative[s[1].type]){if(t=(o.find.ID(u.matches[0].replace(rt,it),t)||[])[0],!t)return n;e=e.slice(s.shift().value.length)}a=Q.needsContext.test(e)?0:s.length;while(a--){if(u=s[a],o.relative[c=u.type])break;if((p=o.find[c])&&(i=p(u.matches[0].replace(rt,it),V.test(s[0].type)&&t.parentNode||t))){if(s.splice(a,1),e=i.length&&yt(s),!e)return M.apply(n,i),n;break}}}return l(e,f)(i,t,!h,n,V.test(e)),n}r.sortStable=b.split("").sort(A).join("")===b,r.detectDuplicates=S,p(),r.sortDetached=ut(function(e){return 1&e.compareDocumentPosition(f.createElement("div"))}),ut(function(e){return e.innerHTML="","#"===e.firstChild.getAttribute("href")})||ct("type|href|height|width",function(e,n,r){return r?t:e.getAttribute(n,"type"===n.toLowerCase()?1:2)}),r.attributes&&ut(function(e){return e.innerHTML="",e.firstChild.setAttribute("value",""),""===e.firstChild.getAttribute("value")})||ct("value",function(e,n,r){return r||"input"!==e.nodeName.toLowerCase()?t:e.defaultValue}),ut(function(e){return null==e.getAttribute("disabled")})||ct(B,function(e,n,r){var i;return 
r?t:(i=e.getAttributeNode(n))&&i.specified?i.value:e[n]===!0?n.toLowerCase():null}),x.find=at,x.expr=at.selectors,x.expr[":"]=x.expr.pseudos,x.unique=at.uniqueSort,x.text=at.getText,x.isXMLDoc=at.isXML,x.contains=at.contains}(e);var O={};function F(e){var t=O[e]={};return x.each(e.match(T)||[],function(e,n){t[n]=!0}),t}x.Callbacks=function(e){e="string"==typeof e?O[e]||F(e):x.extend({},e);var n,r,i,o,a,s,l=[],u=!e.once&&[],c=function(t){for(r=e.memory&&t,i=!0,a=s||0,s=0,o=l.length,n=!0;l&&o>a;a++)if(l[a].apply(t[0],t[1])===!1&&e.stopOnFalse){r=!1;break}n=!1,l&&(u?u.length&&c(u.shift()):r?l=[]:p.disable())},p={add:function(){if(l){var t=l.length;(function i(t){x.each(t,function(t,n){var r=x.type(n);"function"===r?e.unique&&p.has(n)||l.push(n):n&&n.length&&"string"!==r&&i(n)})})(arguments),n?o=l.length:r&&(s=t,c(r))}return this},remove:function(){return l&&x.each(arguments,function(e,t){var r;while((r=x.inArray(t,l,r))>-1)l.splice(r,1),n&&(o>=r&&o--,a>=r&&a--)}),this},has:function(e){return e?x.inArray(e,l)>-1:!(!l||!l.length)},empty:function(){return l=[],o=0,this},disable:function(){return l=u=r=t,this},disabled:function(){return!l},lock:function(){return u=t,r||p.disable(),this},locked:function(){return!u},fireWith:function(e,t){return!l||i&&!u||(t=t||[],t=[e,t.slice?t.slice():t],n?u.push(t):c(t)),this},fire:function(){return p.fireWith(this,arguments),this},fired:function(){return!!i}};return p},x.extend({Deferred:function(e){var t=[["resolve","done",x.Callbacks("once memory"),"resolved"],["reject","fail",x.Callbacks("once memory"),"rejected"],["notify","progress",x.Callbacks("memory")]],n="pending",r={state:function(){return n},always:function(){return i.done(arguments).fail(arguments),this},then:function(){var e=arguments;return x.Deferred(function(n){x.each(t,function(t,o){var a=o[0],s=x.isFunction(e[t])&&e[t];i[o[1]](function(){var 
e=s&&s.apply(this,arguments);e&&x.isFunction(e.promise)?e.promise().done(n.resolve).fail(n.reject).progress(n.notify):n[a+"With"](this===r?n.promise():this,s?[e]:arguments)})}),e=null}).promise()},promise:function(e){return null!=e?x.extend(e,r):r}},i={};return r.pipe=r.then,x.each(t,function(e,o){var a=o[2],s=o[3];r[o[1]]=a.add,s&&a.add(function(){n=s},t[1^e][2].disable,t[2][2].lock),i[o[0]]=function(){return i[o[0]+"With"](this===i?r:this,arguments),this},i[o[0]+"With"]=a.fireWith}),r.promise(i),e&&e.call(i,i),i},when:function(e){var t=0,n=g.call(arguments),r=n.length,i=1!==r||e&&x.isFunction(e.promise)?r:0,o=1===i?e:x.Deferred(),a=function(e,t,n){return function(r){t[e]=this,n[e]=arguments.length>1?g.call(arguments):r,n===s?o.notifyWith(t,n):--i||o.resolveWith(t,n)}},s,l,u;if(r>1)for(s=Array(r),l=Array(r),u=Array(r);r>t;t++)n[t]&&x.isFunction(n[t].promise)?n[t].promise().done(a(t,u,n)).fail(o.reject).progress(a(t,l,s)):--i;return i||o.resolveWith(u,n),o.promise()}}),x.support=function(t){var n,r,o,s,l,u,c,p,f,d=a.createElement("div");if(d.setAttribute("className","t"),d.innerHTML="
a",n=d.getElementsByTagName("*")||[],r=d.getElementsByTagName("a")[0],!r||!r.style||!n.length)return t;s=a.createElement("select"),u=s.appendChild(a.createElement("option")),o=d.getElementsByTagName("input")[0],r.style.cssText="top:1px;float:left;opacity:.5",t.getSetAttribute="t"!==d.className,t.leadingWhitespace=3===d.firstChild.nodeType,t.tbody=!d.getElementsByTagName("tbody").length,t.htmlSerialize=!!d.getElementsByTagName("link").length,t.style=/top/.test(r.getAttribute("style")),t.hrefNormalized="/a"===r.getAttribute("href"),t.opacity=/^0.5/.test(r.style.opacity),t.cssFloat=!!r.style.cssFloat,t.checkOn=!!o.value,t.optSelected=u.selected,t.enctype=!!a.createElement("form").enctype,t.html5Clone="<:nav>"!==a.createElement("nav").cloneNode(!0).outerHTML,t.inlineBlockNeedsLayout=!1,t.shrinkWrapBlocks=!1,t.pixelPosition=!1,t.deleteExpando=!0,t.noCloneEvent=!0,t.reliableMarginRight=!0,t.boxSizingReliable=!0,o.checked=!0,t.noCloneChecked=o.cloneNode(!0).checked,s.disabled=!0,t.optDisabled=!u.disabled;try{delete d.test}catch(h){t.deleteExpando=!1}o=a.createElement("input"),o.setAttribute("value",""),t.input=""===o.getAttribute("value"),o.value="t",o.setAttribute("type","radio"),t.radioValue="t"===o.value,o.setAttribute("checked","t"),o.setAttribute("name","t"),l=a.createDocumentFragment(),l.appendChild(o),t.appendChecked=o.checked,t.checkClone=l.cloneNode(!0).cloneNode(!0).lastChild.checked,d.attachEvent&&(d.attachEvent("onclick",function(){t.noCloneEvent=!1}),d.cloneNode(!0).click());for(f in{submit:!0,change:!0,focusin:!0})d.setAttribute(c="on"+f,"t"),t[f+"Bubbles"]=c in e||d.attributes[c].expando===!1;d.style.backgroundClip="content-box",d.cloneNode(!0).style.backgroundClip="",t.clearCloneStyle="content-box"===d.style.backgroundClip;for(f in x(t))break;return t.ownLast="0"!==f,x(function(){var 
n,r,o,s="padding:0;margin:0;border:0;display:block;box-sizing:content-box;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;",l=a.getElementsByTagName("body")[0];l&&(n=a.createElement("div"),n.style.cssText="border:0;width:0;height:0;position:absolute;top:0;left:-9999px;margin-top:1px",l.appendChild(n).appendChild(d),d.innerHTML="
t
",o=d.getElementsByTagName("td"),o[0].style.cssText="padding:0;margin:0;border:0;display:none",p=0===o[0].offsetHeight,o[0].style.display="",o[1].style.display="none",t.reliableHiddenOffsets=p&&0===o[0].offsetHeight,d.innerHTML="",d.style.cssText="box-sizing:border-box;-moz-box-sizing:border-box;-webkit-box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%;",x.swap(l,null!=l.style.zoom?{zoom:1}:{},function(){t.boxSizing=4===d.offsetWidth}),e.getComputedStyle&&(t.pixelPosition="1%"!==(e.getComputedStyle(d,null)||{}).top,t.boxSizingReliable="4px"===(e.getComputedStyle(d,null)||{width:"4px"}).width,r=d.appendChild(a.createElement("div")),r.style.cssText=d.style.cssText=s,r.style.marginRight=r.style.width="0",d.style.width="1px",t.reliableMarginRight=!parseFloat((e.getComputedStyle(r,null)||{}).marginRight)),typeof d.style.zoom!==i&&(d.innerHTML="",d.style.cssText=s+"width:1px;padding:1px;display:inline;zoom:1",t.inlineBlockNeedsLayout=3===d.offsetWidth,d.style.display="block",d.innerHTML="
",d.firstChild.style.width="5px",t.shrinkWrapBlocks=3!==d.offsetWidth,t.inlineBlockNeedsLayout&&(l.style.zoom=1)),l.removeChild(n),n=d=o=r=null)}),n=s=l=u=r=o=null,t +}({});var B=/(?:\{[\s\S]*\}|\[[\s\S]*\])$/,P=/([A-Z])/g;function R(e,n,r,i){if(x.acceptData(e)){var o,a,s=x.expando,l=e.nodeType,u=l?x.cache:e,c=l?e[s]:e[s]&&s;if(c&&u[c]&&(i||u[c].data)||r!==t||"string"!=typeof n)return c||(c=l?e[s]=p.pop()||x.guid++:s),u[c]||(u[c]=l?{}:{toJSON:x.noop}),("object"==typeof n||"function"==typeof n)&&(i?u[c]=x.extend(u[c],n):u[c].data=x.extend(u[c].data,n)),a=u[c],i||(a.data||(a.data={}),a=a.data),r!==t&&(a[x.camelCase(n)]=r),"string"==typeof n?(o=a[n],null==o&&(o=a[x.camelCase(n)])):o=a,o}}function W(e,t,n){if(x.acceptData(e)){var r,i,o=e.nodeType,a=o?x.cache:e,s=o?e[x.expando]:x.expando;if(a[s]){if(t&&(r=n?a[s]:a[s].data)){x.isArray(t)?t=t.concat(x.map(t,x.camelCase)):t in r?t=[t]:(t=x.camelCase(t),t=t in r?[t]:t.split(" ")),i=t.length;while(i--)delete r[t[i]];if(n?!I(r):!x.isEmptyObject(r))return}(n||(delete a[s].data,I(a[s])))&&(o?x.cleanData([e],!0):x.support.deleteExpando||a!=a.window?delete a[s]:a[s]=null)}}}x.extend({cache:{},noData:{applet:!0,embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000"},hasData:function(e){return e=e.nodeType?x.cache[e[x.expando]]:e[x.expando],!!e&&!I(e)},data:function(e,t,n){return R(e,t,n)},removeData:function(e,t){return W(e,t)},_data:function(e,t,n){return R(e,t,n,!0)},_removeData:function(e,t){return W(e,t,!0)},acceptData:function(e){if(e.nodeType&&1!==e.nodeType&&9!==e.nodeType)return!1;var t=e.nodeName&&x.noData[e.nodeName.toLowerCase()];return!t||t!==!0&&e.getAttribute("classid")===t}}),x.fn.extend({data:function(e,n){var r,i,o=null,a=0,s=this[0];if(e===t){if(this.length&&(o=x.data(s),1===s.nodeType&&!x._data(s,"parsedAttrs"))){for(r=s.attributes;r.length>a;a++)i=r[a].name,0===i.indexOf("data-")&&(i=x.camelCase(i.slice(5)),$(s,i,o[i]));x._data(s,"parsedAttrs",!0)}return o}return"object"==typeof 
e?this.each(function(){x.data(this,e)}):arguments.length>1?this.each(function(){x.data(this,e,n)}):s?$(s,e,x.data(s,e)):null},removeData:function(e){return this.each(function(){x.removeData(this,e)})}});function $(e,n,r){if(r===t&&1===e.nodeType){var i="data-"+n.replace(P,"-$1").toLowerCase();if(r=e.getAttribute(i),"string"==typeof r){try{r="true"===r?!0:"false"===r?!1:"null"===r?null:+r+""===r?+r:B.test(r)?x.parseJSON(r):r}catch(o){}x.data(e,n,r)}else r=t}return r}function I(e){var t;for(t in e)if(("data"!==t||!x.isEmptyObject(e[t]))&&"toJSON"!==t)return!1;return!0}x.extend({queue:function(e,n,r){var i;return e?(n=(n||"fx")+"queue",i=x._data(e,n),r&&(!i||x.isArray(r)?i=x._data(e,n,x.makeArray(r)):i.push(r)),i||[]):t},dequeue:function(e,t){t=t||"fx";var n=x.queue(e,t),r=n.length,i=n.shift(),o=x._queueHooks(e,t),a=function(){x.dequeue(e,t)};"inprogress"===i&&(i=n.shift(),r--),i&&("fx"===t&&n.unshift("inprogress"),delete o.stop,i.call(e,a,o)),!r&&o&&o.empty.fire()},_queueHooks:function(e,t){var n=t+"queueHooks";return x._data(e,n)||x._data(e,n,{empty:x.Callbacks("once memory").add(function(){x._removeData(e,t+"queue"),x._removeData(e,n)})})}}),x.fn.extend({queue:function(e,n){var r=2;return"string"!=typeof e&&(n=e,e="fx",r--),r>arguments.length?x.queue(this[0],e):n===t?this:this.each(function(){var t=x.queue(this,e,n);x._queueHooks(this,e),"fx"===e&&"inprogress"!==t[0]&&x.dequeue(this,e)})},dequeue:function(e){return this.each(function(){x.dequeue(this,e)})},delay:function(e,t){return e=x.fx?x.fx.speeds[e]||e:e,t=t||"fx",this.queue(t,function(t,n){var r=setTimeout(t,e);n.stop=function(){clearTimeout(r)}})},clearQueue:function(e){return this.queue(e||"fx",[])},promise:function(e,n){var r,i=1,o=x.Deferred(),a=this,s=this.length,l=function(){--i||o.resolveWith(a,[a])};"string"!=typeof e&&(n=e,e=t),e=e||"fx";while(s--)r=x._data(a[s],e+"queueHooks"),r&&r.empty&&(i++,r.empty.add(l));return l(),o.promise(n)}});var 
z,X,U=/[\t\r\n\f]/g,V=/\r/g,Y=/^(?:input|select|textarea|button|object)$/i,J=/^(?:a|area)$/i,G=/^(?:checked|selected)$/i,Q=x.support.getSetAttribute,K=x.support.input;x.fn.extend({attr:function(e,t){return x.access(this,x.attr,e,t,arguments.length>1)},removeAttr:function(e){return this.each(function(){x.removeAttr(this,e)})},prop:function(e,t){return x.access(this,x.prop,e,t,arguments.length>1)},removeProp:function(e){return e=x.propFix[e]||e,this.each(function(){try{this[e]=t,delete this[e]}catch(n){}})},addClass:function(e){var t,n,r,i,o,a=0,s=this.length,l="string"==typeof e&&e;if(x.isFunction(e))return this.each(function(t){x(this).addClass(e.call(this,t,this.className))});if(l)for(t=(e||"").match(T)||[];s>a;a++)if(n=this[a],r=1===n.nodeType&&(n.className?(" "+n.className+" ").replace(U," "):" ")){o=0;while(i=t[o++])0>r.indexOf(" "+i+" ")&&(r+=i+" ");n.className=x.trim(r)}return this},removeClass:function(e){var t,n,r,i,o,a=0,s=this.length,l=0===arguments.length||"string"==typeof e&&e;if(x.isFunction(e))return this.each(function(t){x(this).removeClass(e.call(this,t,this.className))});if(l)for(t=(e||"").match(T)||[];s>a;a++)if(n=this[a],r=1===n.nodeType&&(n.className?(" "+n.className+" ").replace(U," "):"")){o=0;while(i=t[o++])while(r.indexOf(" "+i+" ")>=0)r=r.replace(" "+i+" "," ");n.className=e?x.trim(r):""}return this},toggleClass:function(e,t){var n=typeof e;return"boolean"==typeof t&&"string"===n?t?this.addClass(e):this.removeClass(e):x.isFunction(e)?this.each(function(n){x(this).toggleClass(e.call(this,n,this.className,t),t)}):this.each(function(){if("string"===n){var t,r=0,o=x(this),a=e.match(T)||[];while(t=a[r++])o.hasClass(t)?o.removeClass(t):o.addClass(t)}else(n===i||"boolean"===n)&&(this.className&&x._data(this,"__className__",this.className),this.className=this.className||e===!1?"":x._data(this,"__className__")||"")})},hasClass:function(e){var t=" "+e+" ",n=0,r=this.length;for(;r>n;n++)if(1===this[n].nodeType&&(" "+this[n].className+" ").replace(U," 
").indexOf(t)>=0)return!0;return!1},val:function(e){var n,r,i,o=this[0];{if(arguments.length)return i=x.isFunction(e),this.each(function(n){var o;1===this.nodeType&&(o=i?e.call(this,n,x(this).val()):e,null==o?o="":"number"==typeof o?o+="":x.isArray(o)&&(o=x.map(o,function(e){return null==e?"":e+""})),r=x.valHooks[this.type]||x.valHooks[this.nodeName.toLowerCase()],r&&"set"in r&&r.set(this,o,"value")!==t||(this.value=o))});if(o)return r=x.valHooks[o.type]||x.valHooks[o.nodeName.toLowerCase()],r&&"get"in r&&(n=r.get(o,"value"))!==t?n:(n=o.value,"string"==typeof n?n.replace(V,""):null==n?"":n)}}}),x.extend({valHooks:{option:{get:function(e){var t=x.find.attr(e,"value");return null!=t?t:e.text}},select:{get:function(e){var t,n,r=e.options,i=e.selectedIndex,o="select-one"===e.type||0>i,a=o?null:[],s=o?i+1:r.length,l=0>i?s:o?i:0;for(;s>l;l++)if(n=r[l],!(!n.selected&&l!==i||(x.support.optDisabled?n.disabled:null!==n.getAttribute("disabled"))||n.parentNode.disabled&&x.nodeName(n.parentNode,"optgroup"))){if(t=x(n).val(),o)return t;a.push(t)}return a},set:function(e,t){var n,r,i=e.options,o=x.makeArray(t),a=i.length;while(a--)r=i[a],(r.selected=x.inArray(x(r).val(),o)>=0)&&(n=!0);return n||(e.selectedIndex=-1),o}}},attr:function(e,n,r){var o,a,s=e.nodeType;if(e&&3!==s&&8!==s&&2!==s)return typeof e.getAttribute===i?x.prop(e,n,r):(1===s&&x.isXMLDoc(e)||(n=n.toLowerCase(),o=x.attrHooks[n]||(x.expr.match.bool.test(n)?X:z)),r===t?o&&"get"in o&&null!==(a=o.get(e,n))?a:(a=x.find.attr(e,n),null==a?t:a):null!==r?o&&"set"in o&&(a=o.set(e,r,n))!==t?a:(e.setAttribute(n,r+""),r):(x.removeAttr(e,n),t))},removeAttr:function(e,t){var n,r,i=0,o=t&&t.match(T);if(o&&1===e.nodeType)while(n=o[i++])r=x.propFix[n]||n,x.expr.match.bool.test(n)?K&&Q||!G.test(n)?e[r]=!1:e[x.camelCase("default-"+n)]=e[r]=!1:x.attr(e,n,""),e.removeAttribute(Q?n:r)},attrHooks:{type:{set:function(e,t){if(!x.support.radioValue&&"radio"===t&&x.nodeName(e,"input")){var n=e.value;return 
e.setAttribute("type",t),n&&(e.value=n),t}}}},propFix:{"for":"htmlFor","class":"className"},prop:function(e,n,r){var i,o,a,s=e.nodeType;if(e&&3!==s&&8!==s&&2!==s)return a=1!==s||!x.isXMLDoc(e),a&&(n=x.propFix[n]||n,o=x.propHooks[n]),r!==t?o&&"set"in o&&(i=o.set(e,r,n))!==t?i:e[n]=r:o&&"get"in o&&null!==(i=o.get(e,n))?i:e[n]},propHooks:{tabIndex:{get:function(e){var t=x.find.attr(e,"tabindex");return t?parseInt(t,10):Y.test(e.nodeName)||J.test(e.nodeName)&&e.href?0:-1}}}}),X={set:function(e,t,n){return t===!1?x.removeAttr(e,n):K&&Q||!G.test(n)?e.setAttribute(!Q&&x.propFix[n]||n,n):e[x.camelCase("default-"+n)]=e[n]=!0,n}},x.each(x.expr.match.bool.source.match(/\w+/g),function(e,n){var r=x.expr.attrHandle[n]||x.find.attr;x.expr.attrHandle[n]=K&&Q||!G.test(n)?function(e,n,i){var o=x.expr.attrHandle[n],a=i?t:(x.expr.attrHandle[n]=t)!=r(e,n,i)?n.toLowerCase():null;return x.expr.attrHandle[n]=o,a}:function(e,n,r){return r?t:e[x.camelCase("default-"+n)]?n.toLowerCase():null}}),K&&Q||(x.attrHooks.value={set:function(e,n,r){return x.nodeName(e,"input")?(e.defaultValue=n,t):z&&z.set(e,n,r)}}),Q||(z={set:function(e,n,r){var i=e.getAttributeNode(r);return i||e.setAttributeNode(i=e.ownerDocument.createAttribute(r)),i.value=n+="","value"===r||n===e.getAttribute(r)?n:t}},x.expr.attrHandle.id=x.expr.attrHandle.name=x.expr.attrHandle.coords=function(e,n,r){var i;return r?t:(i=e.getAttributeNode(n))&&""!==i.value?i.value:null},x.valHooks.button={get:function(e,n){var r=e.getAttributeNode(n);return r&&r.specified?r.value:t},set:z.set},x.attrHooks.contenteditable={set:function(e,t,n){z.set(e,""===t?!1:t,n)}},x.each(["width","height"],function(e,n){x.attrHooks[n]={set:function(e,r){return""===r?(e.setAttribute(n,"auto"),r):t}}})),x.support.hrefNormalized||x.each(["href","src"],function(e,t){x.propHooks[t]={get:function(e){return e.getAttribute(t,4)}}}),x.support.style||(x.attrHooks.style={get:function(e){return e.style.cssText||t},set:function(e,t){return 
e.style.cssText=t+""}}),x.support.optSelected||(x.propHooks.selected={get:function(e){var t=e.parentNode;return t&&(t.selectedIndex,t.parentNode&&t.parentNode.selectedIndex),null}}),x.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){x.propFix[this.toLowerCase()]=this}),x.support.enctype||(x.propFix.enctype="encoding"),x.each(["radio","checkbox"],function(){x.valHooks[this]={set:function(e,n){return x.isArray(n)?e.checked=x.inArray(x(e).val(),n)>=0:t}},x.support.checkOn||(x.valHooks[this].get=function(e){return null===e.getAttribute("value")?"on":e.value})});var Z=/^(?:input|select|textarea)$/i,et=/^key/,tt=/^(?:mouse|contextmenu)|click/,nt=/^(?:focusinfocus|focusoutblur)$/,rt=/^([^.]*)(?:\.(.+)|)$/;function it(){return!0}function ot(){return!1}function at(){try{return a.activeElement}catch(e){}}x.event={global:{},add:function(e,n,r,o,a){var s,l,u,c,p,f,d,h,g,m,y,v=x._data(e);if(v){r.handler&&(c=r,r=c.handler,a=c.selector),r.guid||(r.guid=x.guid++),(l=v.events)||(l=v.events={}),(f=v.handle)||(f=v.handle=function(e){return typeof x===i||e&&x.event.triggered===e.type?t:x.event.dispatch.apply(f.elem,arguments)},f.elem=e),n=(n||"").match(T)||[""],u=n.length;while(u--)s=rt.exec(n[u])||[],g=y=s[1],m=(s[2]||"").split(".").sort(),g&&(p=x.event.special[g]||{},g=(a?p.delegateType:p.bindType)||g,p=x.event.special[g]||{},d=x.extend({type:g,origType:y,data:o,handler:r,guid:r.guid,selector:a,needsContext:a&&x.expr.match.needsContext.test(a),namespace:m.join(".")},c),(h=l[g])||(h=l[g]=[],h.delegateCount=0,p.setup&&p.setup.call(e,o,m,f)!==!1||(e.addEventListener?e.addEventListener(g,f,!1):e.attachEvent&&e.attachEvent("on"+g,f))),p.add&&(p.add.call(e,d),d.handler.guid||(d.handler.guid=r.guid)),a?h.splice(h.delegateCount++,0,d):h.push(d),x.event.global[g]=!0);e=null}},remove:function(e,t,n,r,i){var 
o,a,s,l,u,c,p,f,d,h,g,m=x.hasData(e)&&x._data(e);if(m&&(c=m.events)){t=(t||"").match(T)||[""],u=t.length;while(u--)if(s=rt.exec(t[u])||[],d=g=s[1],h=(s[2]||"").split(".").sort(),d){p=x.event.special[d]||{},d=(r?p.delegateType:p.bindType)||d,f=c[d]||[],s=s[2]&&RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"),l=o=f.length;while(o--)a=f[o],!i&&g!==a.origType||n&&n.guid!==a.guid||s&&!s.test(a.namespace)||r&&r!==a.selector&&("**"!==r||!a.selector)||(f.splice(o,1),a.selector&&f.delegateCount--,p.remove&&p.remove.call(e,a));l&&!f.length&&(p.teardown&&p.teardown.call(e,h,m.handle)!==!1||x.removeEvent(e,d,m.handle),delete c[d])}else for(d in c)x.event.remove(e,d+t[u],n,r,!0);x.isEmptyObject(c)&&(delete m.handle,x._removeData(e,"events"))}},trigger:function(n,r,i,o){var s,l,u,c,p,f,d,h=[i||a],g=v.call(n,"type")?n.type:n,m=v.call(n,"namespace")?n.namespace.split("."):[];if(u=f=i=i||a,3!==i.nodeType&&8!==i.nodeType&&!nt.test(g+x.event.triggered)&&(g.indexOf(".")>=0&&(m=g.split("."),g=m.shift(),m.sort()),l=0>g.indexOf(":")&&"on"+g,n=n[x.expando]?n:new x.Event(g,"object"==typeof n&&n),n.isTrigger=o?2:3,n.namespace=m.join("."),n.namespace_re=n.namespace?RegExp("(^|\\.)"+m.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,n.result=t,n.target||(n.target=i),r=null==r?[n]:x.makeArray(r,[n]),p=x.event.special[g]||{},o||!p.trigger||p.trigger.apply(i,r)!==!1)){if(!o&&!p.noBubble&&!x.isWindow(i)){for(c=p.delegateType||g,nt.test(c+g)||(u=u.parentNode);u;u=u.parentNode)h.push(u),f=u;f===(i.ownerDocument||a)&&h.push(f.defaultView||f.parentWindow||e)}d=0;while((u=h[d++])&&!n.isPropagationStopped())n.type=d>1?c:p.bindType||g,s=(x._data(u,"events")||{})[n.type]&&x._data(u,"handle"),s&&s.apply(u,r),s=l&&u[l],s&&x.acceptData(u)&&s.apply&&s.apply(u,r)===!1&&n.preventDefault();if(n.type=g,!o&&!n.isDefaultPrevented()&&(!p._default||p._default.apply(h.pop(),r)===!1)&&x.acceptData(i)&&l&&i[g]&&!x.isWindow(i)){f=i[l],f&&(i[l]=null),x.event.triggered=g;try{i[g]()}catch(y){}x.event.triggered=t,f&&(i[l]=f)}return 
n.result}},dispatch:function(e){e=x.event.fix(e);var n,r,i,o,a,s=[],l=g.call(arguments),u=(x._data(this,"events")||{})[e.type]||[],c=x.event.special[e.type]||{};if(l[0]=e,e.delegateTarget=this,!c.preDispatch||c.preDispatch.call(this,e)!==!1){s=x.event.handlers.call(this,e,u),n=0;while((o=s[n++])&&!e.isPropagationStopped()){e.currentTarget=o.elem,a=0;while((i=o.handlers[a++])&&!e.isImmediatePropagationStopped())(!e.namespace_re||e.namespace_re.test(i.namespace))&&(e.handleObj=i,e.data=i.data,r=((x.event.special[i.origType]||{}).handle||i.handler).apply(o.elem,l),r!==t&&(e.result=r)===!1&&(e.preventDefault(),e.stopPropagation()))}return c.postDispatch&&c.postDispatch.call(this,e),e.result}},handlers:function(e,n){var r,i,o,a,s=[],l=n.delegateCount,u=e.target;if(l&&u.nodeType&&(!e.button||"click"!==e.type))for(;u!=this;u=u.parentNode||this)if(1===u.nodeType&&(u.disabled!==!0||"click"!==e.type)){for(o=[],a=0;l>a;a++)i=n[a],r=i.selector+" ",o[r]===t&&(o[r]=i.needsContext?x(r,this).index(u)>=0:x.find(r,this,null,[u]).length),o[r]&&o.push(i);o.length&&s.push({elem:u,handlers:o})}return n.length>l&&s.push({elem:this,handlers:n.slice(l)}),s},fix:function(e){if(e[x.expando])return e;var t,n,r,i=e.type,o=e,s=this.fixHooks[i];s||(this.fixHooks[i]=s=tt.test(i)?this.mouseHooks:et.test(i)?this.keyHooks:{}),r=s.props?this.props.concat(s.props):this.props,e=new x.Event(o),t=r.length;while(t--)n=r[t],e[n]=o[n];return e.target||(e.target=o.srcElement||a),3===e.target.nodeType&&(e.target=e.target.parentNode),e.metaKey=!!e.metaKey,s.filter?s.filter(e,o):e},props:"altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" "),filter:function(e,t){return null==e.which&&(e.which=null!=t.charCode?t.charCode:t.keyCode),e}},mouseHooks:{props:"button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement".split(" 
"),filter:function(e,n){var r,i,o,s=n.button,l=n.fromElement;return null==e.pageX&&null!=n.clientX&&(i=e.target.ownerDocument||a,o=i.documentElement,r=i.body,e.pageX=n.clientX+(o&&o.scrollLeft||r&&r.scrollLeft||0)-(o&&o.clientLeft||r&&r.clientLeft||0),e.pageY=n.clientY+(o&&o.scrollTop||r&&r.scrollTop||0)-(o&&o.clientTop||r&&r.clientTop||0)),!e.relatedTarget&&l&&(e.relatedTarget=l===e.target?n.toElement:l),e.which||s===t||(e.which=1&s?1:2&s?3:4&s?2:0),e}},special:{load:{noBubble:!0},focus:{trigger:function(){if(this!==at()&&this.focus)try{return this.focus(),!1}catch(e){}},delegateType:"focusin"},blur:{trigger:function(){return this===at()&&this.blur?(this.blur(),!1):t},delegateType:"focusout"},click:{trigger:function(){return x.nodeName(this,"input")&&"checkbox"===this.type&&this.click?(this.click(),!1):t},_default:function(e){return x.nodeName(e.target,"a")}},beforeunload:{postDispatch:function(e){e.result!==t&&(e.originalEvent.returnValue=e.result)}}},simulate:function(e,t,n,r){var i=x.extend(new x.Event,n,{type:e,isSimulated:!0,originalEvent:{}});r?x.event.trigger(i,null,t):x.event.dispatch.call(t,i),i.isDefaultPrevented()&&n.preventDefault()}},x.removeEvent=a.removeEventListener?function(e,t,n){e.removeEventListener&&e.removeEventListener(t,n,!1)}:function(e,t,n){var r="on"+t;e.detachEvent&&(typeof e[r]===i&&(e[r]=null),e.detachEvent(r,n))},x.Event=function(e,n){return this instanceof x.Event?(e&&e.type?(this.originalEvent=e,this.type=e.type,this.isDefaultPrevented=e.defaultPrevented||e.returnValue===!1||e.getPreventDefault&&e.getPreventDefault()?it:ot):this.type=e,n&&x.extend(this,n),this.timeStamp=e&&e.timeStamp||x.now(),this[x.expando]=!0,t):new x.Event(e,n)},x.Event.prototype={isDefaultPrevented:ot,isPropagationStopped:ot,isImmediatePropagationStopped:ot,preventDefault:function(){var e=this.originalEvent;this.isDefaultPrevented=it,e&&(e.preventDefault?e.preventDefault():e.returnValue=!1)},stopPropagation:function(){var 
e=this.originalEvent;this.isPropagationStopped=it,e&&(e.stopPropagation&&e.stopPropagation(),e.cancelBubble=!0)},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=it,this.stopPropagation()}},x.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(e,t){x.event.special[e]={delegateType:t,bindType:t,handle:function(e){var n,r=this,i=e.relatedTarget,o=e.handleObj;return(!i||i!==r&&!x.contains(r,i))&&(e.type=o.origType,n=o.handler.apply(this,arguments),e.type=t),n}}}),x.support.submitBubbles||(x.event.special.submit={setup:function(){return x.nodeName(this,"form")?!1:(x.event.add(this,"click._submit keypress._submit",function(e){var n=e.target,r=x.nodeName(n,"input")||x.nodeName(n,"button")?n.form:t;r&&!x._data(r,"submitBubbles")&&(x.event.add(r,"submit._submit",function(e){e._submit_bubble=!0}),x._data(r,"submitBubbles",!0))}),t)},postDispatch:function(e){e._submit_bubble&&(delete e._submit_bubble,this.parentNode&&!e.isTrigger&&x.event.simulate("submit",this.parentNode,e,!0))},teardown:function(){return x.nodeName(this,"form")?!1:(x.event.remove(this,"._submit"),t)}}),x.support.changeBubbles||(x.event.special.change={setup:function(){return Z.test(this.nodeName)?(("checkbox"===this.type||"radio"===this.type)&&(x.event.add(this,"propertychange._change",function(e){"checked"===e.originalEvent.propertyName&&(this._just_changed=!0)}),x.event.add(this,"click._change",function(e){this._just_changed&&!e.isTrigger&&(this._just_changed=!1),x.event.simulate("change",this,e,!0)})),!1):(x.event.add(this,"beforeactivate._change",function(e){var t=e.target;Z.test(t.nodeName)&&!x._data(t,"changeBubbles")&&(x.event.add(t,"change._change",function(e){!this.parentNode||e.isSimulated||e.isTrigger||x.event.simulate("change",this.parentNode,e,!0)}),x._data(t,"changeBubbles",!0))}),t)},handle:function(e){var n=e.target;return 
this!==n||e.isSimulated||e.isTrigger||"radio"!==n.type&&"checkbox"!==n.type?e.handleObj.handler.apply(this,arguments):t},teardown:function(){return x.event.remove(this,"._change"),!Z.test(this.nodeName)}}),x.support.focusinBubbles||x.each({focus:"focusin",blur:"focusout"},function(e,t){var n=0,r=function(e){x.event.simulate(t,e.target,x.event.fix(e),!0)};x.event.special[t]={setup:function(){0===n++&&a.addEventListener(e,r,!0)},teardown:function(){0===--n&&a.removeEventListener(e,r,!0)}}}),x.fn.extend({on:function(e,n,r,i,o){var a,s;if("object"==typeof e){"string"!=typeof n&&(r=r||n,n=t);for(a in e)this.on(a,n,r,e[a],o);return this}if(null==r&&null==i?(i=n,r=n=t):null==i&&("string"==typeof n?(i=r,r=t):(i=r,r=n,n=t)),i===!1)i=ot;else if(!i)return this;return 1===o&&(s=i,i=function(e){return x().off(e),s.apply(this,arguments)},i.guid=s.guid||(s.guid=x.guid++)),this.each(function(){x.event.add(this,e,i,r,n)})},one:function(e,t,n,r){return this.on(e,t,n,r,1)},off:function(e,n,r){var i,o;if(e&&e.preventDefault&&e.handleObj)return i=e.handleObj,x(e.delegateTarget).off(i.namespace?i.origType+"."+i.namespace:i.origType,i.selector,i.handler),this;if("object"==typeof e){for(o in e)this.off(o,n,e[o]);return this}return(n===!1||"function"==typeof n)&&(r=n,n=t),r===!1&&(r=ot),this.each(function(){x.event.remove(this,e,r,n)})},trigger:function(e,t){return this.each(function(){x.event.trigger(e,t,this)})},triggerHandler:function(e,n){var r=this[0];return r?x.event.trigger(e,n,r,!0):t}});var st=/^.[^:#\[\.,]*$/,lt=/^(?:parents|prev(?:Until|All))/,ut=x.expr.match.needsContext,ct={children:!0,contents:!0,next:!0,prev:!0};x.fn.extend({find:function(e){var t,n=[],r=this,i=r.length;if("string"!=typeof e)return this.pushStack(x(e).filter(function(){for(t=0;i>t;t++)if(x.contains(r[t],this))return!0}));for(t=0;i>t;t++)x.find(e,r[t],n);return n=this.pushStack(i>1?x.unique(n):n),n.selector=this.selector?this.selector+" "+e:e,n},has:function(e){var t,n=x(e,this),r=n.length;return 
this.filter(function(){for(t=0;r>t;t++)if(x.contains(this,n[t]))return!0})},not:function(e){return this.pushStack(ft(this,e||[],!0))},filter:function(e){return this.pushStack(ft(this,e||[],!1))},is:function(e){return!!ft(this,"string"==typeof e&&ut.test(e)?x(e):e||[],!1).length},closest:function(e,t){var n,r=0,i=this.length,o=[],a=ut.test(e)||"string"!=typeof e?x(e,t||this.context):0;for(;i>r;r++)for(n=this[r];n&&n!==t;n=n.parentNode)if(11>n.nodeType&&(a?a.index(n)>-1:1===n.nodeType&&x.find.matchesSelector(n,e))){n=o.push(n);break}return this.pushStack(o.length>1?x.unique(o):o)},index:function(e){return e?"string"==typeof e?x.inArray(this[0],x(e)):x.inArray(e.jquery?e[0]:e,this):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(e,t){var n="string"==typeof e?x(e,t):x.makeArray(e&&e.nodeType?[e]:e),r=x.merge(this.get(),n);return this.pushStack(x.unique(r))},addBack:function(e){return this.add(null==e?this.prevObject:this.prevObject.filter(e))}});function pt(e,t){do e=e[t];while(e&&1!==e.nodeType);return e}x.each({parent:function(e){var t=e.parentNode;return t&&11!==t.nodeType?t:null},parents:function(e){return x.dir(e,"parentNode")},parentsUntil:function(e,t,n){return x.dir(e,"parentNode",n)},next:function(e){return pt(e,"nextSibling")},prev:function(e){return pt(e,"previousSibling")},nextAll:function(e){return x.dir(e,"nextSibling")},prevAll:function(e){return x.dir(e,"previousSibling")},nextUntil:function(e,t,n){return x.dir(e,"nextSibling",n)},prevUntil:function(e,t,n){return x.dir(e,"previousSibling",n)},siblings:function(e){return x.sibling((e.parentNode||{}).firstChild,e)},children:function(e){return x.sibling(e.firstChild)},contents:function(e){return x.nodeName(e,"iframe")?e.contentDocument||e.contentWindow.document:x.merge([],e.childNodes)}},function(e,t){x.fn[e]=function(n,r){var i=x.map(this,t,n);return"Until"!==e.slice(-5)&&(r=n),r&&"string"==typeof 
r&&(i=x.filter(r,i)),this.length>1&&(ct[e]||(i=x.unique(i)),lt.test(e)&&(i=i.reverse())),this.pushStack(i)}}),x.extend({filter:function(e,t,n){var r=t[0];return n&&(e=":not("+e+")"),1===t.length&&1===r.nodeType?x.find.matchesSelector(r,e)?[r]:[]:x.find.matches(e,x.grep(t,function(e){return 1===e.nodeType}))},dir:function(e,n,r){var i=[],o=e[n];while(o&&9!==o.nodeType&&(r===t||1!==o.nodeType||!x(o).is(r)))1===o.nodeType&&i.push(o),o=o[n];return i},sibling:function(e,t){var n=[];for(;e;e=e.nextSibling)1===e.nodeType&&e!==t&&n.push(e);return n}});function ft(e,t,n){if(x.isFunction(t))return x.grep(e,function(e,r){return!!t.call(e,r,e)!==n});if(t.nodeType)return x.grep(e,function(e){return e===t!==n});if("string"==typeof t){if(st.test(t))return x.filter(t,e,n);t=x.filter(t,e)}return x.grep(e,function(e){return x.inArray(e,t)>=0!==n})}function dt(e){var t=ht.split("|"),n=e.createDocumentFragment();if(n.createElement)while(t.length)n.createElement(t.pop());return n}var ht="abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",gt=/ jQuery\d+="(?:null|\d+)"/g,mt=RegExp("<(?:"+ht+")[\\s/>]","i"),yt=/^\s+/,vt=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,bt=/<([\w:]+)/,xt=/\s*$/g,At={option:[1,""],legend:[1,"
","
"],area:[1,"",""],param:[1,"",""],thead:[1,"","
"],tr:[2,"","
"],col:[2,"","
"],td:[3,"","
"],_default:x.support.htmlSerialize?[0,"",""]:[1,"X
","
"]},jt=dt(a),Dt=jt.appendChild(a.createElement("div"));At.optgroup=At.option,At.tbody=At.tfoot=At.colgroup=At.caption=At.thead,At.th=At.td,x.fn.extend({text:function(e){return x.access(this,function(e){return e===t?x.text(this):this.empty().append((this[0]&&this[0].ownerDocument||a).createTextNode(e))},null,e,arguments.length)},append:function(){return this.domManip(arguments,function(e){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var t=Lt(this,e);t.appendChild(e)}})},prepend:function(){return this.domManip(arguments,function(e){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var t=Lt(this,e);t.insertBefore(e,t.firstChild)}})},before:function(){return this.domManip(arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this)})},after:function(){return this.domManip(arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this.nextSibling)})},remove:function(e,t){var n,r=e?x.filter(e,this):this,i=0;for(;null!=(n=r[i]);i++)t||1!==n.nodeType||x.cleanData(Ft(n)),n.parentNode&&(t&&x.contains(n.ownerDocument,n)&&_t(Ft(n,"script")),n.parentNode.removeChild(n));return this},empty:function(){var e,t=0;for(;null!=(e=this[t]);t++){1===e.nodeType&&x.cleanData(Ft(e,!1));while(e.firstChild)e.removeChild(e.firstChild);e.options&&x.nodeName(e,"select")&&(e.options.length=0)}return this},clone:function(e,t){return e=null==e?!1:e,t=null==t?e:t,this.map(function(){return x.clone(this,e,t)})},html:function(e){return x.access(this,function(e){var n=this[0]||{},r=0,i=this.length;if(e===t)return 1===n.nodeType?n.innerHTML.replace(gt,""):t;if(!("string"!=typeof e||Tt.test(e)||!x.support.htmlSerialize&&mt.test(e)||!x.support.leadingWhitespace&&yt.test(e)||At[(bt.exec(e)||["",""])[1].toLowerCase()])){e=e.replace(vt,"<$1>");try{for(;i>r;r++)n=this[r]||{},1===n.nodeType&&(x.cleanData(Ft(n,!1)),n.innerHTML=e);n=0}catch(o){}}n&&this.empty().append(e)},null,e,arguments.length)},replaceWith:function(){var 
e=x.map(this,function(e){return[e.nextSibling,e.parentNode]}),t=0;return this.domManip(arguments,function(n){var r=e[t++],i=e[t++];i&&(r&&r.parentNode!==i&&(r=this.nextSibling),x(this).remove(),i.insertBefore(n,r))},!0),t?this:this.remove()},detach:function(e){return this.remove(e,!0)},domManip:function(e,t,n){e=d.apply([],e);var r,i,o,a,s,l,u=0,c=this.length,p=this,f=c-1,h=e[0],g=x.isFunction(h);if(g||!(1>=c||"string"!=typeof h||x.support.checkClone)&&Nt.test(h))return this.each(function(r){var i=p.eq(r);g&&(e[0]=h.call(this,r,i.html())),i.domManip(e,t,n)});if(c&&(l=x.buildFragment(e,this[0].ownerDocument,!1,!n&&this),r=l.firstChild,1===l.childNodes.length&&(l=r),r)){for(a=x.map(Ft(l,"script"),Ht),o=a.length;c>u;u++)i=l,u!==f&&(i=x.clone(i,!0,!0),o&&x.merge(a,Ft(i,"script"))),t.call(this[u],i,u);if(o)for(s=a[a.length-1].ownerDocument,x.map(a,qt),u=0;o>u;u++)i=a[u],kt.test(i.type||"")&&!x._data(i,"globalEval")&&x.contains(s,i)&&(i.src?x._evalUrl(i.src):x.globalEval((i.text||i.textContent||i.innerHTML||"").replace(St,"")));l=r=null}return this}});function Lt(e,t){return x.nodeName(e,"table")&&x.nodeName(1===t.nodeType?t:t.firstChild,"tr")?e.getElementsByTagName("tbody")[0]||e.appendChild(e.ownerDocument.createElement("tbody")):e}function Ht(e){return e.type=(null!==x.find.attr(e,"type"))+"/"+e.type,e}function qt(e){var t=Et.exec(e.type);return t?e.type=t[1]:e.removeAttribute("type"),e}function _t(e,t){var n,r=0;for(;null!=(n=e[r]);r++)x._data(n,"globalEval",!t||x._data(t[r],"globalEval"))}function Mt(e,t){if(1===t.nodeType&&x.hasData(e)){var n,r,i,o=x._data(e),a=x._data(t,o),s=o.events;if(s){delete a.handle,a.events={};for(n in s)for(r=0,i=s[n].length;i>r;r++)x.event.add(t,n,s[n][r])}a.data&&(a.data=x.extend({},a.data))}}function Ot(e,t){var n,r,i;if(1===t.nodeType){if(n=t.nodeName.toLowerCase(),!x.support.noCloneEvent&&t[x.expando]){i=x._data(t);for(r in 
i.events)x.removeEvent(t,r,i.handle);t.removeAttribute(x.expando)}"script"===n&&t.text!==e.text?(Ht(t).text=e.text,qt(t)):"object"===n?(t.parentNode&&(t.outerHTML=e.outerHTML),x.support.html5Clone&&e.innerHTML&&!x.trim(t.innerHTML)&&(t.innerHTML=e.innerHTML)):"input"===n&&Ct.test(e.type)?(t.defaultChecked=t.checked=e.checked,t.value!==e.value&&(t.value=e.value)):"option"===n?t.defaultSelected=t.selected=e.defaultSelected:("input"===n||"textarea"===n)&&(t.defaultValue=e.defaultValue)}}x.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(e,t){x.fn[e]=function(e){var n,r=0,i=[],o=x(e),a=o.length-1;for(;a>=r;r++)n=r===a?this:this.clone(!0),x(o[r])[t](n),h.apply(i,n.get());return this.pushStack(i)}});function Ft(e,n){var r,o,a=0,s=typeof e.getElementsByTagName!==i?e.getElementsByTagName(n||"*"):typeof e.querySelectorAll!==i?e.querySelectorAll(n||"*"):t;if(!s)for(s=[],r=e.childNodes||e;null!=(o=r[a]);a++)!n||x.nodeName(o,n)?s.push(o):x.merge(s,Ft(o,n));return n===t||n&&x.nodeName(e,n)?x.merge([e],s):s}function Bt(e){Ct.test(e.type)&&(e.defaultChecked=e.checked)}x.extend({clone:function(e,t,n){var r,i,o,a,s,l=x.contains(e.ownerDocument,e);if(x.support.html5Clone||x.isXMLDoc(e)||!mt.test("<"+e.nodeName+">")?o=e.cloneNode(!0):(Dt.innerHTML=e.outerHTML,Dt.removeChild(o=Dt.firstChild)),!(x.support.noCloneEvent&&x.support.noCloneChecked||1!==e.nodeType&&11!==e.nodeType||x.isXMLDoc(e)))for(r=Ft(o),s=Ft(e),a=0;null!=(i=s[a]);++a)r[a]&&Ot(i,r[a]);if(t)if(n)for(s=s||Ft(e),r=r||Ft(o),a=0;null!=(i=s[a]);a++)Mt(i,r[a]);else Mt(e,o);return r=Ft(o,"script"),r.length>0&&_t(r,!l&&Ft(e,"script")),r=s=i=null,o},buildFragment:function(e,t,n,r){var i,o,a,s,l,u,c,p=e.length,f=dt(t),d=[],h=0;for(;p>h;h++)if(o=e[h],o||0===o)if("object"===x.type(o))x.merge(d,o.nodeType?[o]:o);else 
if(wt.test(o)){s=s||f.appendChild(t.createElement("div")),l=(bt.exec(o)||["",""])[1].toLowerCase(),c=At[l]||At._default,s.innerHTML=c[1]+o.replace(vt,"<$1>")+c[2],i=c[0];while(i--)s=s.lastChild;if(!x.support.leadingWhitespace&&yt.test(o)&&d.push(t.createTextNode(yt.exec(o)[0])),!x.support.tbody){o="table"!==l||xt.test(o)?""!==c[1]||xt.test(o)?0:s:s.firstChild,i=o&&o.childNodes.length;while(i--)x.nodeName(u=o.childNodes[i],"tbody")&&!u.childNodes.length&&o.removeChild(u)}x.merge(d,s.childNodes),s.textContent="";while(s.firstChild)s.removeChild(s.firstChild);s=f.lastChild}else d.push(t.createTextNode(o));s&&f.removeChild(s),x.support.appendChecked||x.grep(Ft(d,"input"),Bt),h=0;while(o=d[h++])if((!r||-1===x.inArray(o,r))&&(a=x.contains(o.ownerDocument,o),s=Ft(f.appendChild(o),"script"),a&&_t(s),n)){i=0;while(o=s[i++])kt.test(o.type||"")&&n.push(o)}return s=null,f},cleanData:function(e,t){var n,r,o,a,s=0,l=x.expando,u=x.cache,c=x.support.deleteExpando,f=x.event.special;for(;null!=(n=e[s]);s++)if((t||x.acceptData(n))&&(o=n[l],a=o&&u[o])){if(a.events)for(r in a.events)f[r]?x.event.remove(n,r):x.removeEvent(n,r,a.handle); +u[o]&&(delete u[o],c?delete n[l]:typeof n.removeAttribute!==i?n.removeAttribute(l):n[l]=null,p.push(o))}},_evalUrl:function(e){return x.ajax({url:e,type:"GET",dataType:"script",async:!1,global:!1,"throws":!0})}}),x.fn.extend({wrapAll:function(e){if(x.isFunction(e))return this.each(function(t){x(this).wrapAll(e.call(this,t))});if(this[0]){var t=x(e,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&t.insertBefore(this[0]),t.map(function(){var e=this;while(e.firstChild&&1===e.firstChild.nodeType)e=e.firstChild;return e}).append(this)}return this},wrapInner:function(e){return x.isFunction(e)?this.each(function(t){x(this).wrapInner(e.call(this,t))}):this.each(function(){var t=x(this),n=t.contents();n.length?n.wrapAll(e):t.append(e)})},wrap:function(e){var t=x.isFunction(e);return 
this.each(function(n){x(this).wrapAll(t?e.call(this,n):e)})},unwrap:function(){return this.parent().each(function(){x.nodeName(this,"body")||x(this).replaceWith(this.childNodes)}).end()}});var Pt,Rt,Wt,$t=/alpha\([^)]*\)/i,It=/opacity\s*=\s*([^)]*)/,zt=/^(top|right|bottom|left)$/,Xt=/^(none|table(?!-c[ea]).+)/,Ut=/^margin/,Vt=RegExp("^("+w+")(.*)$","i"),Yt=RegExp("^("+w+")(?!px)[a-z%]+$","i"),Jt=RegExp("^([+-])=("+w+")","i"),Gt={BODY:"block"},Qt={position:"absolute",visibility:"hidden",display:"block"},Kt={letterSpacing:0,fontWeight:400},Zt=["Top","Right","Bottom","Left"],en=["Webkit","O","Moz","ms"];function tn(e,t){if(t in e)return t;var n=t.charAt(0).toUpperCase()+t.slice(1),r=t,i=en.length;while(i--)if(t=en[i]+n,t in e)return t;return r}function nn(e,t){return e=t||e,"none"===x.css(e,"display")||!x.contains(e.ownerDocument,e)}function rn(e,t){var n,r,i,o=[],a=0,s=e.length;for(;s>a;a++)r=e[a],r.style&&(o[a]=x._data(r,"olddisplay"),n=r.style.display,t?(o[a]||"none"!==n||(r.style.display=""),""===r.style.display&&nn(r)&&(o[a]=x._data(r,"olddisplay",ln(r.nodeName)))):o[a]||(i=nn(r),(n&&"none"!==n||!i)&&x._data(r,"olddisplay",i?n:x.css(r,"display"))));for(a=0;s>a;a++)r=e[a],r.style&&(t&&"none"!==r.style.display&&""!==r.style.display||(r.style.display=t?o[a]||"":"none"));return e}x.fn.extend({css:function(e,n){return x.access(this,function(e,n,r){var i,o,a={},s=0;if(x.isArray(n)){for(o=Rt(e),i=n.length;i>s;s++)a[n[s]]=x.css(e,n[s],!1,o);return a}return r!==t?x.style(e,n,r):x.css(e,n)},e,n,arguments.length>1)},show:function(){return rn(this,!0)},hide:function(){return rn(this)},toggle:function(e){return"boolean"==typeof e?e?this.show():this.hide():this.each(function(){nn(this)?x(this).show():x(this).hide()})}}),x.extend({cssHooks:{opacity:{get:function(e,t){if(t){var 
n=Wt(e,"opacity");return""===n?"1":n}}}},cssNumber:{columnCount:!0,fillOpacity:!0,fontWeight:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":x.support.cssFloat?"cssFloat":"styleFloat"},style:function(e,n,r,i){if(e&&3!==e.nodeType&&8!==e.nodeType&&e.style){var o,a,s,l=x.camelCase(n),u=e.style;if(n=x.cssProps[l]||(x.cssProps[l]=tn(u,l)),s=x.cssHooks[n]||x.cssHooks[l],r===t)return s&&"get"in s&&(o=s.get(e,!1,i))!==t?o:u[n];if(a=typeof r,"string"===a&&(o=Jt.exec(r))&&(r=(o[1]+1)*o[2]+parseFloat(x.css(e,n)),a="number"),!(null==r||"number"===a&&isNaN(r)||("number"!==a||x.cssNumber[l]||(r+="px"),x.support.clearCloneStyle||""!==r||0!==n.indexOf("background")||(u[n]="inherit"),s&&"set"in s&&(r=s.set(e,r,i))===t)))try{u[n]=r}catch(c){}}},css:function(e,n,r,i){var o,a,s,l=x.camelCase(n);return n=x.cssProps[l]||(x.cssProps[l]=tn(e.style,l)),s=x.cssHooks[n]||x.cssHooks[l],s&&"get"in s&&(a=s.get(e,!0,r)),a===t&&(a=Wt(e,n,i)),"normal"===a&&n in Kt&&(a=Kt[n]),""===r||r?(o=parseFloat(a),r===!0||x.isNumeric(o)?o||0:a):a}}),e.getComputedStyle?(Rt=function(t){return e.getComputedStyle(t,null)},Wt=function(e,n,r){var i,o,a,s=r||Rt(e),l=s?s.getPropertyValue(n)||s[n]:t,u=e.style;return s&&(""!==l||x.contains(e.ownerDocument,e)||(l=x.style(e,n)),Yt.test(l)&&Ut.test(n)&&(i=u.width,o=u.minWidth,a=u.maxWidth,u.minWidth=u.maxWidth=u.width=l,l=s.width,u.width=i,u.minWidth=o,u.maxWidth=a)),l}):a.documentElement.currentStyle&&(Rt=function(e){return e.currentStyle},Wt=function(e,n,r){var i,o,a,s=r||Rt(e),l=s?s[n]:t,u=e.style;return null==l&&u&&u[n]&&(l=u[n]),Yt.test(l)&&!zt.test(n)&&(i=u.left,o=e.runtimeStyle,a=o&&o.left,a&&(o.left=e.currentStyle.left),u.left="fontSize"===n?"1em":l,l=u.pixelLeft+"px",u.left=i,a&&(o.left=a)),""===l?"auto":l});function on(e,t,n){var r=Vt.exec(t);return r?Math.max(0,r[1]-(n||0))+(r[2]||"px"):t}function an(e,t,n,r,i){var 
o=n===(r?"border":"content")?4:"width"===t?1:0,a=0;for(;4>o;o+=2)"margin"===n&&(a+=x.css(e,n+Zt[o],!0,i)),r?("content"===n&&(a-=x.css(e,"padding"+Zt[o],!0,i)),"margin"!==n&&(a-=x.css(e,"border"+Zt[o]+"Width",!0,i))):(a+=x.css(e,"padding"+Zt[o],!0,i),"padding"!==n&&(a+=x.css(e,"border"+Zt[o]+"Width",!0,i)));return a}function sn(e,t,n){var r=!0,i="width"===t?e.offsetWidth:e.offsetHeight,o=Rt(e),a=x.support.boxSizing&&"border-box"===x.css(e,"boxSizing",!1,o);if(0>=i||null==i){if(i=Wt(e,t,o),(0>i||null==i)&&(i=e.style[t]),Yt.test(i))return i;r=a&&(x.support.boxSizingReliable||i===e.style[t]),i=parseFloat(i)||0}return i+an(e,t,n||(a?"border":"content"),r,o)+"px"}function ln(e){var t=a,n=Gt[e];return n||(n=un(e,t),"none"!==n&&n||(Pt=(Pt||x("