一、前言
虹軟開發SDK以來,其免費使用的營銷策略,成功降低了中小企業使用人臉識別技術的成本。然而,對.NET開發者來說,虹軟沒有提供C#版本的SDK供開發者直接調用(為什麼JAVA就有?),而是建議開發者利用C++版本封裝。大齡的C系程序員一般都是從C開始學習的,但對年輕的開發者來說,指針操作似乎非常棘手。這無形中給虹軟SDK的應用帶來了較大的挑戰,尤其在多人場景下,需要指針移動來獲取全部人臉數據。本文通過在.net 5環境下,實現一個基於C/S模式的多人實時識別案例,希望對廣大.NETer在運用虹軟SDK的時候有一定參考意義。
二足陨、項(xiàng)目結(jié)構(gòu)
1.開發(fā)環(huán)境 .net5(正式版) 或 .net core3.1 (前后端都是I┧凇)
-
Client端(WPF框架)
w1.png -
Service端(gRPC框架)
S1.png 開發(fā)工具 / 平臺(tái)
VS2019 / Win10
三、項(xiàng)目依賴(nuget)
-
Client端
w2.png -
Service端
s2.png
四墨缘,項(xiàng)目主要流程
Step1. 客戶端監控提取圖像中人臉特征。
Step2. 客戶端將人臉特征封裝入Request Stream,發送至服務端。
Step3. 服務端逐一解析Request Stream中人臉特征,並進行對比識別。
Step4. 服務(wù)端將結(jié)果寫入Response Stream返回。
Step5. 客戶端逐一解析Response Stream並顯示。
五褥芒,核心代碼解析
- C++ dll 封裝
建議把虹軟的dll封裝成一個.net core類庫,方便前後端調用。
1.png
using System;
using System.Runtime.InteropServices;
namespace ArcSoft
{
/// <summary>
/// P/Invoke bindings for the ArcSoft Face SDK 3.0 native library
/// (libarcsoft_face_engine.dll). All entry points use the Cdecl calling
/// convention; engine handles are opaque native pointers (IntPtr), and each
/// function returns an SDK status code where 0 means success.
/// </summary>
public class Arcsoft_Face_3_0
{
// File name of the native SDK library resolved by the P/Invoke loader.
public const string Dll_PATH = "libarcsoft_face_engine.dll";
/// <summary>
/// Gets the activation-file information.
/// </summary>
/// <param name="activeFileInfo">Pointer to a native buffer that receives an ASF_ActiveFileInfo structure.</param>
/// <returns>0 on success; otherwise an SDK error code.</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFGetActiveFileInfo(IntPtr activeFileInfo);
/// <summary>
/// Activates the SDK online.
/// </summary>
/// <param name="appId">APPID obtained from the ArcSoft developer site.</param>
/// <param name="sdkKey">SDKKEY obtained from the ArcSoft developer site.</param>
/// <returns>0 on success; otherwise an SDK error code (callers in this project also treat 90114, "already activated", as success).</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFOnlineActivation(string appId, string sdkKey);
/// <summary>
/// Activates the SDK engine. ASFActivation behaves the same as
/// ASFOnlineActivation and exists for compatibility with older users.
/// </summary>
/// <param name="appId">AppID matching the SDK.</param>
/// <param name="sdkKey">SDKKey matching the SDK.</param>
/// <returns>0 on success; otherwise an SDK error code.</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFActivation(string appId, string sdkKey);
/// <summary>
/// Initializes an engine instance.
/// </summary>
/// <param name="detectMode">ASF_DETECT_MODE_VIDEO (video) or ASF_DETECT_MODE_IMAGE (still image).</param>
/// <param name="detectFaceOrientPriority">Face-orientation priority; ASF_OrientPriority.ASF_OP_0_HIGHER_EXT is recommended.</param>
/// <param name="detectFaceScaleVal">Minimum detectable face size, expressed as an image-width ratio denominator.</param>
/// <param name="detectFaceMaxNum">Maximum number of faces to detect.</param>
/// <param name="combinedMask">Bitwise OR of FaceEngineMask features to enable; one or more.</param>
/// <param name="hEngine">Receives the engine handle on success.</param>
/// <returns>0 on success; otherwise an SDK error code.</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFInitEngine(uint detectMode, int detectFaceOrientPriority, int detectFaceScaleVal, int detectFaceMaxNum, int combinedMask, ref IntPtr hEngine);
/// <summary>
/// Detects faces in a raw image buffer.
/// </summary>
/// <param name="hEngine">Engine handle.</param>
/// <param name="width">Image width in pixels.</param>
/// <param name="height">Image height in pixels.</param>
/// <param name="format">Image color space (see ASF_ImagePixelFormat).</param>
/// <param name="imgData">Pointer to the raw pixel data.</param>
/// <param name="detectedFaces">Pointer to a native ASF_MultiFaceInfo that receives the detection result.</param>
/// <param name="detectModel">Reserved; pass the default (ASF_DETECT_MODEL_RGB) in this version.</param>
/// <returns>0 on success; otherwise an SDK error code.</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFDetectFaces(IntPtr hEngine, int width, int height, int format, IntPtr imgData, IntPtr detectedFaces, int detectModel);
/// <summary>
/// Detects faces from an ASVLOFFSCREEN image descriptor.
/// </summary>
/// <param name="hEngine">Engine handle.</param>
/// <param name="ImgData">Pointer to a native ASVLOFFSCREEN describing the image.</param>
/// <param name="detectedFaces">Receives a pointer to the detected-face information.</param>
/// <param name="detectModel">Reserved; pass the default in this version.</param>
/// <returns>0 on success; otherwise an SDK error code.</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFDetectFacesEx(IntPtr hEngine, IntPtr ImgData, out IntPtr detectedFaces, int detectModel);
/// <summary>
/// Extracts the feature of a single face from a raw image buffer.
/// </summary>
/// <param name="hEngine">Engine handle.</param>
/// <param name="width">Image width; must be a multiple of 4.</param>
/// <param name="height">Image height; must be even for YUYV/I420/NV21/NV12, unrestricted for BGR24/GRAY/DEPTH_U16.</param>
/// <param name="format">Image color space (see ASF_ImagePixelFormat).</param>
/// <param name="imgData">Pointer to the raw pixel data.</param>
/// <param name="faceInfo">Pointer to a native ASF_SingleFaceInfo (face rectangle and orientation).</param>
/// <param name="faceFeature">Pointer to a native ASF_FaceFeature that receives the extracted feature.</param>
/// <returns>0 on success; otherwise an SDK error code (callers in this project treat 81925 as low confidence).</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFFaceFeatureExtract(IntPtr hEngine, int width, int height, int format, IntPtr imgData, IntPtr faceInfo, IntPtr faceFeature);
/// <summary>
/// Extracts the feature of a single face from an ASVLOFFSCREEN descriptor.
/// </summary>
/// <param name="hEngine">Engine handle.</param>
/// <param name="imgData">Pointer to a native ASVLOFFSCREEN describing the image.</param>
/// <param name="faceInfo">Pointer to a native ASF_SingleFaceInfo (face rectangle and orientation).</param>
/// <param name="feature">Pointer to a native ASF_FaceFeature that receives the extracted feature.</param>
/// <returns>0 on success; otherwise an SDK error code.</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFFaceFeatureExtractEx(IntPtr hEngine, IntPtr imgData, IntPtr faceInfo, IntPtr feature);
/// <summary>
/// Compares two face features and outputs a similarity score.
/// </summary>
/// <param name="hEngine">Engine handle.</param>
/// <param name="feature1">Pointer to the first ASF_FaceFeature.</param>
/// <param name="feature2">Pointer to the second ASF_FaceFeature.</param>
/// <param name="confidenceLevel">Receives the similarity score.</param>
/// <param name="compareModel">Comparison model; default ASF_LIFE_PHOTO.
/// 1. ASF_LIFE_PHOTO: life-photo vs life-photo, recommended threshold 0.80;
/// 2. ASF_ID_PHOTO: ID-photo comparisons, recommended threshold 0.82.</param>
/// <returns>0 on success; otherwise an SDK error code.</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFFaceFeatureCompare(IntPtr hEngine, IntPtr feature1, IntPtr feature2, ref float confidenceLevel, int compareModel);
/// <summary>
/// Sets the RGB/IR liveness thresholds; SDK defaults are RGB 0.5 and IR 0.7.
/// </summary>
/// <param name="hEngine">Engine handle.</param>
/// <param name="threshold">Pointer to a native ASF_LivenessThreshold; recommended RGB 0.5, IR 0.7.</param>
/// <returns>0 on success; otherwise an SDK error code.</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFSetLivenessParam(IntPtr hEngine, IntPtr threshold);
/// <summary>
/// Runs face-attribute detection on a raw image buffer.
/// </summary>
/// <param name="hEngine">Engine handle.</param>
/// <param name="width">Image width; must be a multiple of 4.</param>
/// <param name="height">Image height; must be even for YUYV/I420/NV21/NV12, unrestricted for BGR24.</param>
/// <param name="format">Supported formats: YUYV/I420/NV21/NV12/BGR24.</param>
/// <param name="imgData">Pointer to the raw pixel data.</param>
/// <param name="detectedFaces">Pointer to the multi-face information from a prior detect call.</param>
/// <param name="combinedMask">1. Attributes to detect (ASF_AGE, ASF_GENDER, ASF_FACE3DANGLE, ASF_LIVENESS), multi-select.
/// 2. Each attribute must have been enabled in the engine-init combinedMask.</param>
/// <returns>0 on success; otherwise an SDK error code.</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFProcess(IntPtr hEngine, int width, int height, int format, IntPtr imgData, IntPtr detectedFaces, int combinedMask);
/// <summary>
/// Runs attribute detection (age/gender/3D angle) from an ASVLOFFSCREEN descriptor.
/// Supports at most 4 faces (extras report "unknown"); liveness supports a single
/// face only; IR images are not supported by this entry point.
/// </summary>
/// <param name="hEngine">Engine handle.</param>
/// <param name="imgData">Pointer to a native ASVLOFFSCREEN describing the image.</param>
/// <param name="detectedFaces">Pointer to the multi-face information from a prior detect call.</param>
/// <param name="combinedMask">1. Attributes to detect (ASF_AGE, ASF_GENDER, ASF_FACE3DANGLE, ASF_LIVENESS), multi-select.
/// 2. Each attribute must have been enabled in the engine-init combinedMask.</param>
/// <returns>0 on success; otherwise an SDK error code.</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFProcessEx(IntPtr hEngine, IntPtr imgData, IntPtr detectedFaces, int combinedMask);
/// <summary>
/// Fetches the age results computed by the last ASFProcess/ASFProcessEx call.
/// </summary>
/// <param name="hEngine">Engine handle.</param>
/// <param name="ageInfo">Pointer to a native ASF_AgeInfo that receives the result.</param>
/// <returns>0 on success; otherwise an SDK error code.</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFGetAge(IntPtr hEngine, IntPtr ageInfo);
/// <summary>
/// Fetches the gender results computed by the last ASFProcess/ASFProcessEx call.
/// </summary>
/// <param name="hEngine">Engine handle.</param>
/// <param name="genderInfo">Pointer to a native ASF_GenderInfo that receives the result.</param>
/// <returns>0 on success; otherwise an SDK error code.</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFGetGender(IntPtr hEngine, IntPtr genderInfo);
/// <summary>
/// Fetches the face 3D-angle results computed by the last process call.
/// </summary>
/// <param name="hEngine">Engine handle.</param>
/// <param name="p3DAngleInfo">Pointer to a native ASF_Face3DAngle that receives the result.</param>
/// <returns>0 on success; otherwise an SDK error code.</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFGetFace3DAngle(IntPtr hEngine, IntPtr p3DAngleInfo);
/// <summary>
/// Fetches the RGB liveness result computed by the last process call.
/// </summary>
/// <param name="hEngine">Engine handle.</param>
/// <param name="livenessInfo">Pointer to a native ASF_LivenessInfo that receives the result.</param>
/// <returns>0 on success; otherwise an SDK error code.</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFGetLivenessScore(IntPtr hEngine, IntPtr livenessInfo);
/// <summary>
/// IR liveness detection from a raw image buffer. Supports a single face only;
/// additional faces report "unknown".
/// </summary>
/// <param name="hEngine">Engine handle.</param>
/// <param name="width">Image width; must be a multiple of 4.</param>
/// <param name="height">Image height.</param>
/// <param name="format">Image color format.</param>
/// <param name="imgData">Pointer to the raw pixel data.</param>
/// <param name="detectedFaces">Pointer to the multi-face information.</param>
/// <param name="combinedMask">Only ASF_IR_LIVENESS is currently supported.</param>
/// <returns>0 on success; otherwise an SDK error code.</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFProcess_IR(IntPtr hEngine, int width, int height, int format, IntPtr imgData, IntPtr detectedFaces, int combinedMask);
/// <summary>
/// IR liveness detection from an ASVLOFFSCREEN descriptor. Supports a single
/// face only; additional faces report "unknown".
/// </summary>
/// <param name="hEngine">Engine handle.</param>
/// <param name="imgData">Pointer to a native ASVLOFFSCREEN describing the image.</param>
/// <param name="detectedFaces">Pointer to the multi-face information.</param>
/// <param name="combinedMask">Only ASF_IR_LIVENESS is currently supported.</param>
/// <returns>0 on success; otherwise an SDK error code.</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFProcessEx_IR(IntPtr hEngine, IntPtr imgData, IntPtr detectedFaces, int combinedMask);
/// <summary>
/// Fetches the IR liveness result computed by the last IR process call.
/// </summary>
/// <param name="hEngine">Engine handle.</param>
/// <param name="livenessInfo">Pointer to a native ASF_LivenessInfo that receives the IR result.</param>
/// <returns>0 on success; otherwise an SDK error code.</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFGetLivenessScore_IR(IntPtr hEngine, IntPtr livenessInfo);
/// <summary>
/// Gets the SDK version information.
/// </summary>
/// <returns>Version information on success; null pointers inside on failure.</returns>
/// NOTE(review): this returns a three-pointer struct by value from a Cdecl
/// native call, which relies on hidden-return-buffer marshaling — confirm it
/// behaves correctly on the target platform/bitness.
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern ASF_VERSION ASFGetVersion();
/// <summary>
/// Destroys an SDK engine instance.
/// </summary>
/// <param name="pEngine">Engine handle returned by ASFInitEngine.</param>
/// <returns>0 on success; otherwise an SDK error code.</returns>
[DllImport(Dll_PATH, CallingConvention = CallingConvention.Cdecl)]
public static extern int ASFUninitEngine(IntPtr pEngine);
}
/////////////////////////////////Parameter enumerations/////////////////////////////////
/// <summary>
/// Detection mode constants passed to ASFInitEngine.
/// </summary>
public struct ASF_DetectMode
{
/// <summary>
/// Video mode, typically used for continuous multi-frame detection.
/// </summary>
public const uint ASF_DETECT_MODE_VIDEO = 0x00000000;
/// <summary>
/// Image mode, typically used for one-shot detection on still images.
/// </summary>
public const uint ASF_DETECT_MODE_IMAGE = 0xFFFFFFFF;
}
/// <summary>
/// Face-detection orientation priorities passed to ASFInitEngine.
/// </summary>
public struct ArcSoftFace_OrientPriority
{
/// <summary>
/// Upright orientation only (normal preview direction).
/// </summary>
public const int ASF_OP_0_ONLY = 0x1;
/// <summary>
/// Rotated 90° counter-clockwise from upright.
/// </summary>
public const int ASF_OP_90_ONLY = 0x2;
/// <summary>
/// Rotated 270° counter-clockwise from upright.
/// </summary>
public const int ASF_OP_270_ONLY = 0x3;
/// <summary>
/// Rotated 180° from upright (same result clockwise or counter-clockwise).
/// </summary>
public const int ASF_OP_180_ONLY = 0x4;
/// <summary>
/// All orientations.
/// </summary>
public const int ASF_OP_0_HIGHER_EXT = 0x5;
}
/// <summary>
/// Orientation codes reported for detected faces.
/// </summary>
public struct ArcSoftFace_OrientCode
{
public const int ASF_OC_0 = 0x1; // 0 degrees
public const int ASF_OC_90 = 0x2; // 90 degrees
public const int ASF_OC_270 = 0x3; // 270 degrees
public const int ASF_OC_180 = 0x4; // 180 degrees
public const int ASF_OC_30 = 0x5; // 30 degrees
public const int ASF_OC_60 = 0x6; // 60 degrees
public const int ASF_OC_120 = 0x7; // 120 degrees
public const int ASF_OC_150 = 0x8; // 150 degrees
public const int ASF_OC_210 = 0x9; // 210 degrees
public const int ASF_OC_240 = 0xa; // 240 degrees
public const int ASF_OC_300 = 0xb; // 300 degrees
public const int ASF_OC_330 = 0xc; // 330 degrees
}
/// <summary>
/// Detection model constants (reserved parameter of the detect calls).
/// </summary>
public struct ASF_DetectModel
{
public const int ASF_DETECT_MODEL_RGB = 0x1; // RGB-image detection model
// Reserved for future detection models.
}
/// <summary>
/// Selectable models for ASFFaceFeatureCompare.
/// </summary>
public struct ASF_CompareModel
{
public const int ASF_LIFE_PHOTO = 0x1; // life-photo vs life-photo; recommended threshold 0.80
public const int ASF_ID_PHOTO = 0x2; // ID-photo comparisons (ID vs ID or ID vs life); recommended threshold 0.82
}
/// <summary>
/// Supported image color-space format codes.
/// </summary>
public struct ASF_ImagePixelFormat
{
// 8-bit Y plane, followed by interleaved 2x2-subsampled V/U plane.
public const int ASVL_PAF_NV21 = 2050;
// 8-bit Y plane, followed by interleaved 2x2-subsampled U/V plane.
public const int ASVL_PAF_NV12 = 2049;
// Interleaved RGB, byte order B, G, R, B...
public const int ASVL_PAF_RGB24_B8G8R8 = 513;
// 8-bit Y plane, 8-bit 2x2-subsampled U plane, 8-bit 2x2-subsampled V plane.
public const int ASVL_PAF_I420 = 1537;
// Interleaved YUV, U/V subsampled 2x1, byte order Y0, U0, Y1, V0.
public const int ASVL_PAF_YUYV = 1289;
// 8-bit IR image.
public const int ASVL_PAF_GRAY = 1793;
// 16-bit IR image; ASVL_PAF_DEPTH_U16 is reserved only.
public const int ASVL_PAF_DEPTH_U16 = 3074;
}
/// <summary>
/// Engine feature flags, combined bitwise for ASFInitEngine/ASFProcess.
/// </summary>
public struct FaceEngineMask
{
// Face detection.
public const int ASF_FACE_DETECT = 0x00000001;
// Face feature extraction / recognition.
public const int ASF_FACERECOGNITION = 0x00000004;
// Age estimation.
public const int ASF_AGE = 0x00000008;
// Gender estimation.
public const int ASF_GENDER = 0x00000010;
// 3D face angle.
public const int ASF_FACE3DANGLE = 0x00000020;
// RGB liveness.
public const int ASF_LIVENESS = 0x00000080;
// IR liveness.
public const int ASF_IR_LIVENESS = 0x00000400;
}
/////////////////////////////////Data structures/////////////////////////////////
/// <summary>
/// SDK version information returned by ASFGetVersion. Each field is a
/// pointer to a native ANSI string owned by the SDK.
/// </summary>
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Ansi)]
public struct ASF_VERSION
{
// Version number string.
public IntPtr Version;
// Build date string.
public IntPtr BuildDate;
// Copyright string.
public IntPtr CopyRight;
}
/// <summary>
/// Activation-file information filled by ASFGetActiveFileInfo. Each field is
/// a pointer to a native ANSI string owned by the SDK.
/// </summary>
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Ansi)]
public struct ASF_ActiveFileInfo
{
/// <summary>
/// Validity start time.
/// </summary>
public IntPtr startTime;
/// <summary>
/// Validity end time.
/// </summary>
public IntPtr endTime;
/// <summary>
/// Platform.
/// </summary>
public IntPtr platform;
/// <summary>
/// SDK type.
/// </summary>
public IntPtr sdkType;
/// <summary>
/// APPID.
/// </summary>
public IntPtr appId;
/// <summary>
/// SDKKEY.
/// </summary>
public IntPtr sdkKey;
/// <summary>
/// SDK version number.
/// </summary>
public IntPtr sdkVersion;
/// <summary>
/// Activation-file version number.
/// </summary>
public IntPtr fileVersion;
}
/// <summary>
/// Face bounding rectangle, in pixel coordinates.
/// </summary>
public struct MRECT
{
public int left;
public int top;
public int right;
public int bottom;
}
/// <summary>
/// Information about a single detected face.
/// </summary>
public struct ASF_SingleFaceInfo
{
// Face bounding rectangle.
public MRECT faceRect;
// Face orientation code (see ArcSoftFace_OrientCode).
public int faceOrient;
}
/// <summary>
/// Multi-face detection result. The array fields are native pointers into
/// SDK-owned memory with faceNum elements each.
/// </summary>
public struct ASF_MultiFaceInfo
{
// Pointer to an array of MRECT face rectangles.
public IntPtr faceRects;
// Pointer to an array of int orientation codes.
public IntPtr faceOrients;
// Number of faces detected.
public int faceNum;
// Per-face tracking IDs: stable from the moment a face enters the frame
// until it leaves. Only valid in VIDEO mode; null in IMAGE mode, so it
// must not be dereferenced for image-mode engines.
public IntPtr faceID;
}
/// <summary>
/// A face feature blob: pointer to the raw feature bytes plus their length.
/// </summary>
public struct ASF_FaceFeature
{
// Pointer to the feature bytes.
public IntPtr feature;
// Length of the feature in bytes.
public int featureSize;
}
/// <summary>
/// Age results filled by ASFGetAge.
/// </summary>
public struct ASF_AgeInfo
{
// Pointer to a native int array, one entry per face; 0: unknown, >0: age.
// Made public (was implicitly private) so callers can actually read the
// marshalled result; widening accessibility is backward compatible and
// does not change the sequential layout.
public IntPtr ageArray;
// Number of faces detected.
public int num;
}
/// <summary>
/// Gender results filled by ASFGetGender.
/// </summary>
public struct ASF_GenderInfo
{
// Pointer to a native int array, one entry per face; 0: male, 1: female, -1: unknown.
// Made public (was implicitly private) so callers can actually read the
// marshalled result; widening accessibility is backward compatible and
// does not change the sequential layout.
public IntPtr genderArray;
// Number of faces detected.
public int num;
}
/// <summary>
/// 3D face-angle results filled by ASFGetFace3DAngle. The angle and status
/// fields are pointers to native arrays with one entry per face.
/// </summary>
public struct ASF_Face3DAngle
{
// Pointer to the roll-angle array.
public IntPtr roll;
// Pointer to the yaw-angle array.
public IntPtr yaw;
// Pointer to the pitch-angle array.
public IntPtr pitch;
// Pointer to the status array; 0: normal, non-zero: abnormal.
public IntPtr status;
// Face count. NOTE(review): declared here as IntPtr while the sibling
// result structs use int for their counts — confirm against the native
// header; a pointer-sized field would change the struct layout on x64.
public IntPtr num;
}
/// <summary>
/// Liveness thresholds passed (via pointer) to ASFSetLivenessParam.
/// </summary>
public struct ASF_LivenessThreshold
{
// BGR (RGB) liveness threshold; SDK default 0.5.
// Made public (was implicitly private) so callers can actually set the
// values before marshalling; widening accessibility is backward compatible
// and does not change the sequential layout.
public float thresholdmodel_BGR;
// IR liveness threshold; SDK default 0.7.
public float thresholdmodel_IR;
}
/// <summary>
/// Liveness results filled by ASFGetLivenessScore / ASFGetLivenessScore_IR.
/// </summary>
public struct ASF_LivenessInfo
{
// Pointer to a native int array, one entry per face:
// 0: not live; 1: live; -1: uncertain; -2: more than one face passed in;
// -3: face too small; -4: angle too large; -5: face outside image bounds.
public IntPtr isLive;
// Number of faces detected.
public int num;
}
/// <summary>
/// Image descriptor consumed by the "Ex" SDK entry points: pixel format,
/// dimensions, and up to four plane pointers with their row pitches.
/// </summary>
public struct ASVLOFFSCREEN
{
// Pixel format code (see ASF_ImagePixelFormat).
public uint u32PixelArrayFormat;
public int i32Width;
public int i32Height;
// Plane base addresses; packed formats use only element 0.
// NOTE(review): ArraySubType is UnmanagedType.SysUInt for an IntPtr[] —
// confirm this marshals as pointer-sized elements on the target bitness.
[MarshalAs(UnmanagedType.ByValArray, SizeConst = 4, ArraySubType = UnmanagedType.SysUInt)]
public IntPtr[] ppu8Plane;
// Bytes per row for each plane; packed formats use only element 0.
[MarshalAs(UnmanagedType.ByValArray, SizeConst = 4, ArraySubType = UnmanagedType.I4)]
public int[] pi32Pitch;
}
}
using ArcSoft.Utilities;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Imaging;
using System.IO;
using System.Runtime.InteropServices;
namespace ArcSoft
{
/// <summary>
/// Convenience wrapper over the raw Arcsoft_Face_3_0 P/Invoke bindings:
/// activation, engine creation, face detection, feature extraction, bitmap
/// conversion helpers, and a simple engine pool (three ConcurrentQueue-backed
/// pools: face, ID, and AI/attribute engines).
/// </summary>
public class Arcsoft_Face_Action : Arcsoft_Face_3_0, IEnginePoor
{
// Credentials captured by the activating constructor.
public string AppID { get; }
public string AppKey { get; }
// Requested pool sizes, set by Arcsoft_EnginePool.
public int FaceEngineNums { get; set; }
public int IDEngineNums { get; set; }
public int AIEngineNums { get; set; }
// Engine-handle pools ("Poor" appears to be a misspelling of "Pool").
public ConcurrentQueue<IntPtr> FaceEnginePoor { get; set; }
public ConcurrentQueue<IntPtr> IDEnginePoor { get; set; }
public ConcurrentQueue<IntPtr> AIEnginePoor { get; set; }
/// <summary>
/// Creates an instance without activating the SDK.
/// </summary>
public Arcsoft_Face_Action()
{
}
/// <summary>
/// Activates the SDK online and stores the credentials.
/// Return code 0 (success) and 90114 (already activated) are both accepted.
/// </summary>
/// <param name="appId">APPID from the ArcSoft developer site.</param>
/// <param name="appKey">SDKKEY from the ArcSoft developer site.</param>
/// <exception cref="Exception">Activation returned any other error code.</exception>
public Arcsoft_Face_Action(string appId, string appKey)
{
int retCode = -1;
try
{
retCode = ASFOnlineActivation(appId, appKey);
if (retCode == 0)
{
}
else if (retCode == 90114)
{
// 90114: SDK already activated on this machine — treated as success.
}
else
{
throw new Exception("SDK激活失敗,錯誤碼:" + retCode);
}
AppID = appId;
AppKey = appKey;
}
catch (Exception ex)
{
// Re-wrapped; original stack trace is lost intentionally to surface a short message.
throw new Exception($"Arcsoft_Face_Action 初始化失敗,異常:{ex.Message}");
}
}
/// <summary>
/// Initializes a new SDK engine and returns its handle.
/// </summary>
/// <param name="faceMask">Bitwise OR of FaceEngineMask features to enable.</param>
/// <param name="isImageMode">true: IMAGE mode (still pictures); false: VIDEO mode (frame streams).</param>
/// <returns>The engine handle; caller owns it and should release it with ASFUninitEngine.</returns>
/// <exception cref="Exception">Engine initialization failed.</exception>
public IntPtr InitASFEnginePtr(int faceMask, bool isImageMode = true)
{
IntPtr pEngines = IntPtr.Zero;
int retCode = -1;
try
{
if (isImageMode)
{
retCode = ASFInitEngine(ASF_DetectMode.ASF_DETECT_MODE_IMAGE, ArcSoftFace_OrientPriority.ASF_OP_0_HIGHER_EXT, ParmsBestPractice.detectFaceScaleVal_Image, ParmsBestPractice.detectFaceMaxNum, faceMask, ref pEngines);
}
else
{
retCode = ASFInitEngine(ASF_DetectMode.ASF_DETECT_MODE_VIDEO, ArcSoftFace_OrientPriority.ASF_OP_0_HIGHER_EXT, ParmsBestPractice.detectFaceScaleVal_Video, ParmsBestPractice.detectFaceMaxNum, faceMask, ref pEngines);
}
if (retCode == 0)
{
}
else
{
throw new Exception("SDK初始化失敗,錯誤碼:" + retCode);
}
return pEngines;
}
catch (Exception ex)
{
throw new Exception("ASFFunctions->ASFFunctions, generate exception as: " + ex);
}
}
/// <summary>
/// Runs face detection and returns the marshalled multi-face result.
/// The temporary unmanaged result buffer is freed here, but the pointers
/// inside the returned struct (faceRects/faceOrients/faceID) still reference
/// SDK-owned memory tied to this engine.
/// </summary>
/// <param name="pEngine">Engine handle.</param>
/// <param name="imageInfo">Image buffer and geometry (project type).</param>
/// <returns>The detection result; a default (faceNum == 0) struct if the SDK call throws.</returns>
public static ASF_MultiFaceInfo DetectMultipleFace(IntPtr pEngine, ImageInfo imageInfo)
{
ASF_MultiFaceInfo multiFaceInfo = new ASF_MultiFaceInfo();
IntPtr pMultiFaceInfo = Marshal.AllocHGlobal(Marshal.SizeOf<ASF_MultiFaceInfo>());
try
{
int retCode = ASFDetectFaces(pEngine, imageInfo.width, imageInfo.height, imageInfo.format, imageInfo.imgData, pMultiFaceInfo, ASF_DetectModel.ASF_DETECT_MODEL_RGB);
// NOTE(review): retCode is not checked before marshalling the result.
multiFaceInfo = Marshal.PtrToStructure<ASF_MultiFaceInfo>(pMultiFaceInfo);
return multiFaceInfo;
}
catch
{
// Best-effort: swallow and return whatever was marshalled (or the default).
return multiFaceInfo;
}
finally
{
Marshal.FreeHGlobal(pMultiFaceInfo);
}
}
/// <summary>
/// Detects all faces and converts each to a MarkFaceInfor (project type)
/// with rectangle, tracking ID, and optionally the extracted feature bytes.
/// </summary>
/// <param name="pEngine">Engine handle.</param>
/// <param name="imageInfo">Image buffer and geometry (project type).</param>
/// <param name="extractFaceData">true to also extract each face's feature.</param>
/// <returns>One entry per detected face.</returns>
/// <exception cref="Exception">Any failure is re-wrapped with context.</exception>
public static List<MarkFaceInfor> DetectMultipleFaceAllInformation(IntPtr pEngine, ImageInfo imageInfo, bool extractFaceData = false)
{
List<MarkFaceInfor> infors = new List<MarkFaceInfor>();
ASF_MultiFaceInfo multiFaceInfo = new ASF_MultiFaceInfo();
IntPtr pMultiFaceInfo = Marshal.AllocHGlobal(Marshal.SizeOf<ASF_MultiFaceInfo>());
try
{
int retCode = ASFDetectFaces(pEngine, imageInfo.width, imageInfo.height, imageInfo.format, imageInfo.imgData, pMultiFaceInfo, ASF_DetectModel.ASF_DETECT_MODEL_RGB);
multiFaceInfo = Marshal.PtrToStructure<ASF_MultiFaceInfo>(pMultiFaceInfo);
// Walk the native per-face arrays by pointer arithmetic.
for (int faceIndex = 0; faceIndex < multiFaceInfo.faceNum; faceIndex++)
{
ASF_SingleFaceInfo singleFaceInfo = new ASF_SingleFaceInfo();
singleFaceInfo.faceRect = Marshal.PtrToStructure<MRECT>(multiFaceInfo.faceRects + Marshal.SizeOf<MRECT>() * faceIndex);
singleFaceInfo.faceOrient = Marshal.PtrToStructure<int>(multiFaceInfo.faceOrients + Marshal.SizeOf<int>() * faceIndex);
MarkFaceInfor markFaceInfor = new MarkFaceInfor(singleFaceInfo.faceRect.left, singleFaceInfo.faceRect.top, singleFaceInfo.faceRect.right - singleFaceInfo.faceRect.left, singleFaceInfo.faceRect.bottom - singleFaceInfo.faceRect.top);
// NOTE(review): faceID is documented as null in IMAGE mode; this
// unconditional read is only safe for VIDEO-mode engines — confirm
// all callers pass video engines here.
markFaceInfor.faceID = Marshal.PtrToStructure<int>(multiFaceInfo.faceID + Marshal.SizeOf<int>() * faceIndex);
if (extractFaceData)
{
markFaceInfor.faceFeatureData = ExtractSingleFaceFeature(pEngine, imageInfo, singleFaceInfo.faceRect, singleFaceInfo.faceOrient);
}
infors.Add(markFaceInfor);
}
return infors;
}
catch (Exception ex)
{
throw new Exception($"Arcsoft_Face_Action-->DetectMultipleFaceAllInformation 異常刘莹,異常信息:{ex.Message}");
}
finally
{
Marshal.FreeHGlobal(pMultiFaceInfo);
}
}
/// <summary>
/// Reads a bitmap from a stream, requires exactly one face in it, and
/// extracts that face's feature. Designed for enrollment-style flows.
/// </summary>
/// <param name="ms">Stream containing the image (read via the project's ImageHelper).</param>
/// <param name="engine">Engine handle.</param>
/// <param name="facesFeature">Receives the extracted feature(s); empty on failure.</param>
/// <param name="errorString">Receives a user-facing message on failure; null on success.</param>
/// <returns>true if exactly one face was found and extracted.</returns>
public static bool ExtractFeaturesFromMemoryStream(Stream ms, IntPtr engine, out List<byte[]> facesFeature, out string errorString)
{
facesFeature = new List<byte[]>();
errorString = null;
try
{
ImageInfo imageInfo = new ImageInfo();
ASF_MultiFaceInfo facesInfo = new ASF_MultiFaceInfo();
imageInfo = ImageHelper.ReadBMPFormStream(ms);
facesInfo = DetectMultipleFace(engine, imageInfo);
if (facesInfo.faceNum == 0)
{
errorString = "檢測到人臉數量為0,請免冠正對鏡頭重新識別!";
return false;
}
if (facesInfo.faceNum > 1)
{
errorString = "檢測到多張人臉点弯,請多余人員退出識別區,再重新識別矿咕!";
return false;
}
facesFeature = ExtractAllFeatures(engine, imageInfo, facesInfo);
return true;
}
catch
{
// Best-effort: any failure is reported via errorString rather than thrown.
errorString = "算法錯誤抢肛,請檢查輸入后重試!";
return false;
}
finally
{
// NOTE(review): forcing a GC per call is a heavy hammer; presumably added
// to reclaim large bitmap buffers promptly — confirm it is still needed.
GC.Collect();
}
}
/// <summary>
/// Extracts one face's feature bytes via ASFFaceFeatureExtract.
/// Marshals the face info into unmanaged memory, calls the SDK, then copies
/// the SDK-owned feature bytes into a managed array.
/// </summary>
/// <param name="pEngine">Engine handle.</param>
/// <param name="imageInfo">Image buffer and geometry (project type).</param>
/// <param name="rect">Face rectangle from detection.</param>
/// <param name="faceOrient">Face orientation code from detection.</param>
/// <returns>The feature bytes; null when the SDK returns a non-zero code other than 81925.</returns>
/// <exception cref="Exception">Low confidence (81925) or any marshalling failure.</exception>
private static byte[] ExtractSingleFaceFeature(IntPtr pEngine, ImageInfo imageInfo, MRECT rect, int faceOrient)
{
var singleFaceInfo = new ASF_SingleFaceInfo();
singleFaceInfo.faceRect = rect;
singleFaceInfo.faceOrient = faceOrient;
IntPtr pSingleFaceInfo = Marshal.AllocHGlobal(Marshal.SizeOf<ASF_SingleFaceInfo>());
Marshal.StructureToPtr(singleFaceInfo, pSingleFaceInfo, false);
IntPtr pFaceFeature = Marshal.AllocHGlobal(Marshal.SizeOf<ASF_FaceFeature>());
try
{
int retCode = ASFFaceFeatureExtract(pEngine, imageInfo.width, imageInfo.height, imageInfo.format, imageInfo.imgData, pSingleFaceInfo, pFaceFeature);
if (retCode == 0)
{
// Copy out of the SDK-owned buffer before the engine reuses it.
ASF_FaceFeature faceFeature = Marshal.PtrToStructure<ASF_FaceFeature>(pFaceFeature);
byte[] feature = new byte[faceFeature.featureSize];
Marshal.Copy(faceFeature.feature, feature, 0, faceFeature.featureSize);
return feature;
}
if (retCode == 81925)
{
// 81925: feature-extraction confidence too low.
throw new Exception("人臉特征檢測結果置信度低!");
}
else
{
return null;
}
}
catch (Exception ex)
{
throw new Exception($"Arcsoft_Face_Action-->ExtractSingleFaceFeature exception: {ex.Message}");
}
finally
{
Marshal.FreeHGlobal(pSingleFaceInfo);
Marshal.FreeHGlobal(pFaceFeature);
}
}
/// <summary>
/// Extracts the features of every face in a detection result. Faces whose
/// extraction returns null are skipped silently.
/// WARNING: frees imageInfo.imgData in the finally block — the image buffer
/// must not be used by the caller after this returns.
/// </summary>
/// <param name="pEngine">Engine handle.</param>
/// <param name="imageInfo">Image buffer and geometry (project type); its imgData is freed here.</param>
/// <param name="multiFaceInfo">Detection result to iterate.</param>
/// <returns>One feature byte-array per successfully extracted face.</returns>
public static List<byte[]> ExtractAllFeatures(IntPtr pEngine, ImageInfo imageInfo, ASF_MultiFaceInfo multiFaceInfo)
{
try
{
ASF_SingleFaceInfo singleFaceInfo = new ASF_SingleFaceInfo();
List<byte[]> results = new List<byte[]>();
for (int index = 0; index < multiFaceInfo.faceNum; index++)
{
singleFaceInfo.faceRect = Marshal.PtrToStructure<MRECT>(multiFaceInfo.faceRects + Marshal.SizeOf<MRECT>() * index);
singleFaceInfo.faceOrient = Marshal.PtrToStructure<int>(multiFaceInfo.faceOrients + Marshal.SizeOf<int>() * index);
byte[] singleFaceFeature = ExtractSingleFaceFeature(pEngine, imageInfo, singleFaceInfo.faceRect, singleFaceInfo.faceOrient);
if (singleFaceFeature != null)
{
results.Add(singleFaceFeature);
}
}
return results;
}
catch (Exception ex)
{
throw new Exception("Arcsoft_Face_Action-->ExtractAllFeatures exception " + ex);
}
finally
{
Marshal.FreeHGlobal(imageInfo.imgData);
}
}
/// <summary>
/// Copies a bitmap's pixels (as tightly packed 24bpp BGR) into newly
/// allocated unmanaged memory. Caller owns the returned pointer and must
/// free it with Marshal.FreeHGlobal.
/// </summary>
/// <param name="image">Source bitmap.</param>
/// <param name="width">Receives the image width.</param>
/// <param name="height">Receives the image height.</param>
/// <param name="pitch">Receives the packed row length in bytes (width * 3).</param>
/// <returns>Pointer to the unmanaged pixel buffer.</returns>
public static IntPtr GetBMP_Ptr(Bitmap image, out int width, out int height, out int pitch)
{
IntPtr imageDataPtr = IntPtr.Zero;
try
{
width = -1;
height = -1;
pitch = -1;
byte[] imageData = ReadBMP(image, ref width, ref height, ref pitch);
imageDataPtr = Marshal.AllocHGlobal(imageData.Length);
Marshal.Copy(imageData, 0, imageDataPtr, imageData.Length);
return imageDataPtr;
}
catch (Exception ex)
{
// Free the partial allocation before surfacing the failure.
Marshal.FreeHGlobal(imageDataPtr);
throw new Exception($"Arcsoft_Face_Action-->GetBMP_Ptr exception as:{ex.Message}");
}
}
/// <summary>
/// Reads a bitmap as 24bpp BGR and repacks the rows so that there is no
/// stride padding (pitch becomes width * 3).
/// </summary>
/// <param name="image">Source bitmap.</param>
/// <param name="width">Receives the image width.</param>
/// <param name="height">Receives the image height.</param>
/// <param name="pitch">Receives the packed row length (width * 3).</param>
/// <returns>The tightly packed BGR pixel bytes.</returns>
public static byte[] ReadBMP(Bitmap image, ref int width, ref int height, ref int pitch)
{
// Lock the bitmap into system memory to get direct access to its pixels.
BitmapData data = image.LockBits(new Rectangle(0, 0, image.Width, image.Height), ImageLockMode.ReadOnly, PixelFormat.Format24bppRgb);
// Scan0 is the address of the first pixel (start of the first scan line).
IntPtr ptr = data.Scan0;
// Source length includes any per-row stride padding.
int soureBitArrayLength = data.Height * Math.Abs(data.Stride);
byte[] sourceBitArray = new byte[soureBitArrayLength];
// Copy the locked bitmap bytes into the managed source array.
Marshal.Copy(ptr, sourceBitArray, 0, soureBitArrayLength); width = data.Width;
height = data.Height;
pitch = Math.Abs(data.Stride);
// Repack row by row, dropping stride padding.
int line = width * 3;
int bgr_len = line * height;
byte[] destBitArray = new byte[bgr_len];
for (int i = 0; i < height; ++i)
{
Array.Copy(sourceBitArray, i * pitch, destBitArray, i * line, line);
}
pitch = line;
image.UnlockBits(data);
return destBitArray;
}
/// <summary>
/// Wraps a bitmap in an ASVLOFFSCREEN descriptor (format 513 =
/// ASVL_PAF_RGB24_B8G8R8). The plane pointer references unmanaged memory
/// allocated here; the caller is responsible for freeing it.
/// </summary>
/// <param name="image">Source bitmap.</param>
/// <returns>A filled ASVLOFFSCREEN descriptor.</returns>
public static ASVLOFFSCREEN ChangeMat2ASVLOFFSCREEN(Bitmap image)
{
int width = -1;
int height = -1;
int pitch = -1;
IntPtr imagePtr = GetBMP_Ptr(image, out width, out height, out pitch);
ASVLOFFSCREEN offInput = new ASVLOFFSCREEN();
offInput.u32PixelArrayFormat = 513;
offInput.ppu8Plane = new IntPtr[4];
offInput.ppu8Plane[0] = imagePtr;
offInput.i32Width = width;
offInput.i32Height = height;
offInput.pi32Pitch = new int[4];
offInput.pi32Pitch[0] = pitch;
return offInput;
}
/// <summary>
/// Copies managed feature bytes into an unmanaged ASF_FaceFeature and
/// returns a pointer to it (e.g. for ASFFaceFeatureCompare).
/// NOTE(review): the caller must free BOTH the returned struct pointer and
/// the inner feature buffer; neither is released here, and the inner buffer
/// leaks if StructureToPtr/AllocHGlobal fails after it was allocated.
/// </summary>
/// <param name="data">Feature bytes; must be non-empty.</param>
/// <returns>Pointer to an unmanaged ASF_FaceFeature, or IntPtr.Zero on empty input or failure.</returns>
public static IntPtr PutFeatureByteIntoFeatureIntPtr(byte[] data)
{
try
{
if (data.Length > 0)
{
ASF_FaceFeature localFeature = new ASF_FaceFeature();
localFeature.featureSize = data.Length;
localFeature.feature = Marshal.AllocHGlobal(localFeature.featureSize);
Marshal.Copy(data, 0, localFeature.feature, data.Length);
IntPtr intPtrFeature = Marshal.AllocHGlobal(Marshal.SizeOf<ASF_FaceFeature>());
Marshal.StructureToPtr(localFeature, intPtrFeature, false);
return intPtrFeature;
}
else
{
return IntPtr.Zero;
}
}
catch
{
return IntPtr.Zero;
}
}
/// <summary>
/// Fills the three engine pools with freshly initialized engines according
/// to FaceEngineNums / IDEngineNums / AIEngineNums. Each iteration also
/// re-runs activation via the Arcsoft_Face_Action constructor.
/// </summary>
/// <returns>0 on success.</returns>
/// <exception cref="Exception">Any activation or initialization failure.</exception>
private int InitEnginePool()
{
try
{
for (int index = 0; index < FaceEngineNums; index++)
{
IntPtr enginePtr = IntPtr.Zero;
Arcsoft_Face_Action faceAction = new Arcsoft_Face_Action(AppID, AppKey);
enginePtr = faceAction.InitASFEnginePtr(ParmsBestPractice.faceBaseMask);
PutEngine(FaceEnginePoor, enginePtr);
Console.WriteLine($"FaceEnginePoor add {enginePtr}");
}
for (int index = 0; index < IDEngineNums; index++)
{
IntPtr enginePtr = IntPtr.Zero;
Arcsoft_Face_Action faceAction = new Arcsoft_Face_Action(AppID, AppKey);
enginePtr = faceAction.InitASFEnginePtr(ParmsBestPractice.faceBaseMask);
PutEngine(IDEnginePoor, enginePtr);
Console.WriteLine($"IDEnginePoor add {enginePtr}");
}
for (int index = 0; index < AIEngineNums; index++)
{
IntPtr enginePtr = IntPtr.Zero;
// AI engines additionally enable age/gender/3D-angle/liveness.
int aiMask = FaceEngineMask.ASF_AGE | FaceEngineMask.ASF_GENDER | FaceEngineMask.ASF_FACE3DANGLE | FaceEngineMask.ASF_LIVENESS;
Arcsoft_Face_Action faceAction = new Arcsoft_Face_Action(AppID, AppKey);
enginePtr = faceAction.InitASFEnginePtr(ParmsBestPractice.faceBaseMask | aiMask);
PutEngine(AIEnginePoor, enginePtr);
Console.WriteLine($"AIEnginePoor add {enginePtr}");
}
return 0;
}
catch (Exception ex)
{
throw new Exception($"InitEnginePool--> exception {ex}");
}
}
/// <summary>
/// Dequeues an engine handle from the given pool.
/// </summary>
/// <param name="queue">The pool to take from.</param>
/// <returns>An engine handle, or IntPtr.Zero when the pool is empty.</returns>
public IntPtr GetEngine(ConcurrentQueue<IntPtr> queue)
{
IntPtr item = IntPtr.Zero;
if (queue.TryDequeue(out item))
{
return item;
}
else
{
return IntPtr.Zero;
}
}
/// <summary>
/// Returns an engine handle to the given pool; IntPtr.Zero is ignored.
/// </summary>
/// <param name="queue">The pool to return to.</param>
/// <param name="item">The engine handle.</param>
public void PutEngine(ConcurrentQueue<IntPtr> queue, IntPtr item)
{
if (item != IntPtr.Zero)
{
queue.Enqueue(item);
}
}
/// <summary>
/// Creates the three pools and populates them via InitEnginePool.
/// </summary>
/// <param name="faceEngineNums">Number of face-recognition engines.</param>
/// <param name="idEngineNums">Number of ID-photo engines.</param>
/// <param name="aiEngineNums">Number of attribute (age/gender/angle/liveness) engines.</param>
/// <exception cref="Exception">Pool initialization failed.</exception>
public void Arcsoft_EnginePool(int faceEngineNums = 1, int idEngineNums = 0, int aiEngineNums = 0)
{
FaceEnginePoor = new ConcurrentQueue<IntPtr>();
IDEnginePoor = new ConcurrentQueue<IntPtr>();
AIEnginePoor = new ConcurrentQueue<IntPtr>();
try
{
FaceEngineNums = faceEngineNums;
IDEngineNums = idEngineNums;
AIEngineNums = aiEngineNums;
int status = InitEnginePool();
if (status != 0)
{
throw new Exception("引擎池初始化失敿裥酢!");
}
}
catch (Exception ex)
{
throw new Exception($"ArcSoft_EnginePool-->ArcSoft_EnginePool exception as: {ex}");
}
}
}
/// <summary>
/// "Best practice" parameter constants used when initializing engines.
/// </summary>
public struct ParmsBestPractice
{
// VIDEO mode: valid range [2,32]; recommended value 16.
public const int detectFaceScaleVal_Video = 16;
// IMAGE mode: valid range [2,32]; the original note recommends 30 but this
// project uses 32 — TODO confirm that is intentional.
public const int detectFaceScaleVal_Image = 32;
// Maximum number of faces to detect; valid range [1,50].
public const int detectFaceMaxNum = 50;
// Minimal mask for face recognition: detection + feature extraction.
public const int faceBaseMask = FaceEngineMask.ASF_FACE_DETECT | FaceEngineMask.ASF_FACERECOGNITION;
// Detection + recognition + RGB liveness.
public const int faceLivingMask = FaceEngineMask.ASF_FACE_DETECT | FaceEngineMask.ASF_FACERECOGNITION | FaceEngineMask.ASF_LIVENESS;
// Attribute combination accepted by ASFProcess (age/gender/3D angle/RGB liveness).
public const int processSupportMask = FaceEngineMask.ASF_AGE | FaceEngineMask.ASF_GENDER | FaceEngineMask.ASF_FACE3DANGLE | FaceEngineMask.ASF_LIVENESS;
}
}
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;
namespace ArcSoft
{
public class FaceAction : Arcsoft_Face_3_0
{
public FaceAction(string appId, string appKey, ref IntPtr pEngines, bool isImageMode = false, int faceMask = (FaceEngineMask.ASF_FACE_DETECT | FaceEngineMask.ASF_FACERECOGNITION))
{
int retCode = -1;
try
{
retCode = ASFActivation(appId, appKey);
if ((retCode == 0) || (retCode == 90114))
{
}
else
{
throw new Exception("SDK激活失敗区岗,錯(cuò)誤碼:" + retCode);
}
if (isImageMode)
{
retCode = ASFInitEngine(ASF_DetectMode.ASF_DETECT_MODE_IMAGE, ArcSoftFace_OrientPriority.ASF_OP_0_ONLY, ParmsBestPractice.detectFaceScaleVal_Image, ParmsBestPractice.detectFaceMaxNum, faceMask, ref pEngines);
}
else
{
retCode = ASFInitEngine(ASF_DetectMode.ASF_DETECT_MODE_VIDEO, ArcSoftFace_OrientPriority.ASF_OP_0_HIGHER_EXT, ParmsBestPractice.detectFaceScaleVal_Video, ParmsBestPractice.detectFaceMaxNum, faceMask, ref pEngines);
}
if ((retCode == 0))
{
}
else
{
throw new Exception("SDK初始化失敗略板,錯(cuò)誤碼:" + retCode);
}
}
catch (Exception ex)
{
return;
}
}
public IntPtr InitASFEnginePtr(string appId, string appKey, bool isImageMode = false, int faceMask = (FaceEngineMask.ASF_FACE_DETECT | FaceEngineMask.ASF_FACERECOGNITION))
{
IntPtr pEngines = IntPtr.Zero;
int retCode = -1;
try
{
retCode = ASFOnlineActivation(appId, appKey);
if ((retCode == 0) || (retCode == 90114))
{
}
else
{
throw new Exception("SDK激活失敗,錯(cuò)誤碼:" + retCode);
}
if (isImageMode)
{
retCode = ASFInitEngine(ASF_DetectMode.ASF_DETECT_MODE_IMAGE, ArcSoftFace_OrientPriority.ASF_OP_0_ONLY, ParmsBestPractice.detectFaceScaleVal_Image, ParmsBestPractice.detectFaceMaxNum, faceMask, ref pEngines);
}
else
{
retCode = ASFInitEngine(ASF_DetectMode.ASF_DETECT_MODE_VIDEO, ArcSoftFace_OrientPriority.ASF_OP_0_HIGHER_EXT, ParmsBestPractice.detectFaceScaleVal_Video, ParmsBestPractice.detectFaceMaxNum, faceMask, ref pEngines);
}
if ((retCode == 0))
{
}
else
{
throw new Exception("SDK初始化失敗慈缔,錯(cuò)誤碼:" + retCode);
}
return pEngines;
}
catch (Exception ex)
{
return pEngines;
}
}
public ASF_MultiFaceInfo DetectMultipleFace(IntPtr pEngine, ImageInfo imageInfo)
{
ASF_MultiFaceInfo multiFaceInfo = new ASF_MultiFaceInfo();
IntPtr pMultiFaceInfo = Marshal.AllocHGlobal(Marshal.SizeOf<ASF_MultiFaceInfo>());
try
{
int retCode = ASFDetectFaces(pEngine, imageInfo.width, imageInfo.height, imageInfo.format, imageInfo.imgData, pMultiFaceInfo, ASF_DetectModel.ASF_DETECT_MODEL_RGB);
multiFaceInfo = Marshal.PtrToStructure<ASF_MultiFaceInfo>(pMultiFaceInfo);
return multiFaceInfo;
}
catch
{
return multiFaceInfo;
}
finally
{
Marshal.FreeHGlobal(pMultiFaceInfo);
}
}
/// <summary>
/// Detects all faces in the image and returns one MarkFaceInfor per face
/// (rect, face ID, and optionally the extracted feature blob).
/// </summary>
/// <param name="pEngine">Initialized ArcSoft engine handle.</param>
/// <param name="imageInfo">Image dimensions, pixel format, and unmanaged pixel buffer.</param>
/// <param name="extractFaceData">When true, also runs feature extraction per face.</param>
/// <returns>List of per-face info (empty when detection fails or finds nothing); null on exception.</returns>
public List<MarkFaceInfor> DetectMultipleFaceAllInformation(IntPtr pEngine, ImageInfo imageInfo, bool extractFaceData = false)
{
    List<MarkFaceInfor> infors = new List<MarkFaceInfor>();
    ASF_MultiFaceInfo multiFaceInfo = new ASF_MultiFaceInfo();
    IntPtr pMultiFaceInfo = Marshal.AllocHGlobal(Marshal.SizeOf<ASF_MultiFaceInfo>());
    try
    {
        int retCode = ASFDetectFaces(pEngine, imageInfo.width, imageInfo.height, imageInfo.format, imageInfo.imgData, pMultiFaceInfo, ASF_DetectModel.ASF_DETECT_MODEL_RGB);
        // FIX: the original ignored retCode and marshaled the out-buffer even
        // when detection failed, walking garbage pointers. Bail out with an
        // empty list on failure (callers already handle Count == 0).
        if (retCode != 0)
        {
            return infors;
        }
        multiFaceInfo = Marshal.PtrToStructure<ASF_MultiFaceInfo>(pMultiFaceInfo);
        for (int faceIndex = 0; faceIndex < multiFaceInfo.faceNum; faceIndex++)
        {
            // The native result holds parallel arrays: one MRECT, one orientation
            // int, and one face ID per detected face.
            ASF_SingleFaceInfo singleFaceInfo = new ASF_SingleFaceInfo();
            singleFaceInfo.faceRect = Marshal.PtrToStructure<MRECT>(multiFaceInfo.faceRects + Marshal.SizeOf<MRECT>() * faceIndex);
            singleFaceInfo.faceOrient = Marshal.PtrToStructure<int>(multiFaceInfo.faceOrients + Marshal.SizeOf<int>() * faceIndex);
            MarkFaceInfor markFaceInfor = new MarkFaceInfor(singleFaceInfo.faceRect.left, singleFaceInfo.faceRect.top, singleFaceInfo.faceRect.right - singleFaceInfo.faceRect.left, singleFaceInfo.faceRect.bottom - singleFaceInfo.faceRect.top);
            markFaceInfor.faceID = Marshal.PtrToStructure<int>(multiFaceInfo.faceID + Marshal.SizeOf<int>() * faceIndex);
            if (extractFaceData)
            {
                markFaceInfor.faceFeatureData = ExtractSingleFaceFeature(pEngine, imageInfo, singleFaceInfo.faceRect, singleFaceInfo.faceOrient);
            }
            infors.Add(markFaceInfor);
        }
        return infors;
    }
    catch
    {
        // Preserve the original contract: null signals an unexpected failure.
        return null;
    }
    finally
    {
        Marshal.FreeHGlobal(pMultiFaceInfo);
    }
}
/// <summary>
/// Extracts the recognition feature blob for one detected face.
/// </summary>
/// <param name="pEngine">Initialized ArcSoft engine handle.</param>
/// <param name="imageInfo">Image the face was detected in.</param>
/// <param name="rect">Face bounding rectangle from detection.</param>
/// <param name="faceOrient">Face orientation from detection.</param>
/// <returns>Managed copy of the feature bytes, or null on failure.</returns>
private byte[] ExtractSingleFaceFeature(IntPtr pEngine, ImageInfo imageInfo, MRECT rect, int faceOrient)
{
    var faceInfo = new ASF_SingleFaceInfo { faceRect = rect, faceOrient = faceOrient };
    // The SDK takes the face info by pointer, so copy the struct into
    // unmanaged memory before the call.
    IntPtr pFaceInfo = Marshal.AllocHGlobal(Marshal.SizeOf<ASF_SingleFaceInfo>());
    Marshal.StructureToPtr(faceInfo, pFaceInfo, false);
    IntPtr pFeature = Marshal.AllocHGlobal(Marshal.SizeOf<ASF_FaceFeature>());
    try
    {
        int status = ASFFaceFeatureExtract(pEngine, imageInfo.width, imageInfo.height, imageInfo.format, imageInfo.imgData, pFaceInfo, pFeature);
        if (status != 0)
        {
            return null;
        }
        // Copy the SDK-owned feature buffer into a managed array before the
        // unmanaged side can recycle it.
        var nativeFeature = Marshal.PtrToStructure<ASF_FaceFeature>(pFeature);
        var managedCopy = new byte[nativeFeature.featureSize];
        Marshal.Copy(nativeFeature.feature, managedCopy, 0, nativeFeature.featureSize);
        return managedCopy;
    }
    catch
    {
        return null;
    }
    finally
    {
        Marshal.FreeHGlobal(pFaceInfo);
        Marshal.FreeHGlobal(pFeature);
    }
}
/// <summary>
/// Extracts a feature blob for every face in <paramref name="multiFaceInfo"/>.
/// Faces whose extraction fails (null feature) are silently skipped.
/// </summary>
/// <param name="pEngine">Initialized ArcSoft engine handle.</param>
/// <param name="imageInfo">Image the faces were detected in; its unmanaged
/// pixel buffer (imgData) is freed by this method (see finally block).</param>
/// <param name="multiFaceInfo">Detection result holding per-face rects and orientations.</param>
/// <returns>One feature byte[] per successfully extracted face.</returns>
/// <exception cref="Exception">Wraps any failure during extraction.</exception>
public List<byte[]> ExtractAllFeatures(IntPtr pEngine, ImageInfo imageInfo, ASF_MultiFaceInfo multiFaceInfo)
{
    try
    {
        List<byte[]> results = new List<byte[]>();
        ASF_SingleFaceInfo singleFaceInfo = new ASF_SingleFaceInfo();
        for (int index = 0; index < multiFaceInfo.faceNum; index++)
        {
            // Walk the unmanaged parallel arrays: one MRECT and one int
            // orientation per detected face.
            singleFaceInfo.faceRect = Marshal.PtrToStructure<MRECT>(multiFaceInfo.faceRects + Marshal.SizeOf<MRECT>() * index);
            singleFaceInfo.faceOrient = Marshal.PtrToStructure<int>(multiFaceInfo.faceOrients + Marshal.SizeOf<int>() * index);
            byte[] singleFaceFeature = ExtractSingleFaceFeature(pEngine, imageInfo, singleFaceInfo.faceRect, singleFaceInfo.faceOrient);
            if (singleFaceFeature != null)
            {
                results.Add(singleFaceFeature);
            }
        }
        return results;
    }
    catch (Exception ex)
    {
        // NOTE(review): wrapping loses the original exception type and resets
        // the stack trace; consider `throw;` or an inner-exception ctor.
        throw new Exception("Arcsoft2-->ExtractAllFeatures exception " + ex);
    }
    finally
    {
        // NOTE(review): this frees the CALLER's imageInfo.imgData buffer —
        // callers must not reuse imageInfo after this method. Confirm this
        // ownership transfer is intended; no other method in view frees it.
        Marshal.FreeHGlobal(imageInfo.imgData);
    }
}
/// <summary>
/// Converts a Bitmap to a packed 24bpp BGR pixel buffer in unmanaged memory.
/// The caller owns the returned pointer and must free it with Marshal.FreeHGlobal.
/// </summary>
/// <param name="image">Source bitmap.</param>
/// <param name="width">Receives the image width in pixels.</param>
/// <param name="height">Receives the image height in pixels.</param>
/// <param name="pitch">Receives the packed row length in bytes (width * 3).</param>
/// <returns>Pointer to the newly allocated unmanaged pixel buffer.</returns>
public IntPtr GetBMP_Ptr(Bitmap image, out int width, out int height, out int pitch)
{
    width = -1;
    height = -1;
    pitch = -1;
    byte[] pixels = ReadBMP(image, ref width, ref height, ref pitch);
    IntPtr unmanagedBuffer = Marshal.AllocHGlobal(pixels.Length);
    Marshal.Copy(pixels, 0, unmanagedBuffer, pixels.Length);
    return unmanagedBuffer;
}
/// <summary>
/// Reads a Bitmap's pixels as tightly-packed 24bpp BGR bytes, stripping any
/// per-row stride padding.
/// </summary>
/// <param name="image">Source bitmap.</param>
/// <param name="width">Receives the image width in pixels.</param>
/// <param name="height">Receives the image height in pixels.</param>
/// <param name="pitch">Receives the packed row length in bytes (width * 3).</param>
/// <returns>Packed BGR pixel array of length width * 3 * height.</returns>
public byte[] ReadBMP(Bitmap image, ref int width, ref int height, ref int pitch)
{
    // Lock the bitmap into system memory as 24bpp BGR and get its metadata.
    BitmapData data = image.LockBits(new Rectangle(0, 0, image.Width, image.Height), ImageLockMode.ReadOnly, PixelFormat.Format24bppRgb);
    // Copy the raw scanlines (Scan0 = first pixel of the first row),
    // including any stride padding at the end of each row.
    int stride = Math.Abs(data.Stride);
    byte[] rawScanlines = new byte[data.Height * stride];
    Marshal.Copy(data.Scan0, rawScanlines, 0, rawScanlines.Length);
    width = data.Width;
    height = data.Height;
    pitch = stride;
    // Repack row by row, dropping the padding so rows are exactly width*3 bytes.
    int packedRowBytes = width * 3;
    byte[] packedPixels = new byte[packedRowBytes * height];
    for (int row = 0; row < height; ++row)
    {
        Array.Copy(rawScanlines, row * pitch, packedPixels, row * packedRowBytes, packedRowBytes);
    }
    pitch = packedRowBytes;
    image.UnlockBits(data);
    return packedPixels;
}
/// <summary>
/// Wraps a Bitmap's pixels in the ASVLOFFSCREEN structure the ArcSoft SDK
/// consumes. Allocates an unmanaged pixel buffer the caller must eventually free.
/// </summary>
/// <param name="image">Source bitmap.</param>
/// <returns>Populated ASVLOFFSCREEN pointing at the new unmanaged buffer.</returns>
public ASVLOFFSCREEN ChangeMat2ASVLOFFSCREEN(Bitmap image)
{
    IntPtr pixelBuffer = GetBMP_Ptr(image, out int width, out int height, out int pitch);
    var offscreen = new ASVLOFFSCREEN
    {
        // 513 = 0x201 — presumably the ASVL 24-bit BGR pixel-format code
        // (ASVL_PAF_RGB24_B8G8R8); confirm against the SDK header.
        u32PixelArrayFormat = 513,
        ppu8Plane = new IntPtr[4],
        pi32Pitch = new int[4],
        i32Width = width,
        i32Height = height
    };
    // Only plane 0 is used for packed BGR data.
    offscreen.ppu8Plane[0] = pixelBuffer;
    offscreen.pi32Pitch[0] = pitch;
    return offscreen;
}
}
}
- 客戶端多人特征提取與推流
/// <summary>
/// Timer callback: grabs the latest frame, extracts face features, streams
/// them to the gRPC service, and prints the streamed replies.
/// async void is required here because this is an ElapsedEventHandler.
/// </summary>
private async void DetectFaceTick(object sender, ElapsedEventArgs e)
{
    Mat currentMat;
    lock (_obj)
    {
        // Take a snapshot of the shared frame under the lock.
        currentMat = mat;
    }
    List<MarkFaceInfor> markFaceInfors = ExtractFaceData(currentMat, _enginePool);
    // Nothing to send when extraction failed or found no faces.
    if (markFaceInfors == null || markFaceInfors.Count == 0)
    {
        return;
    }
    // Wait for any in-flight call to finish.
    // FIX: was Task.Delay(10).Wait() — blocking inside an async method ties up
    // a thread-pool thread; await instead.
    // NOTE(review): a plain bool flag is not a thread-safe gate (Elapsed can
    // fire concurrently); consider SemaphoreSlim(1,1).
    while (!_complete)
    {
        await Task.Delay(10);
    }
    _complete = false;
    var regFace = _client.RecongnizationByFace();
    // Reader task: drain the response stream as replies arrive.
    var regFaceResponseTask = Task.Run(async () =>
    {
        WriteReceiveMsgAsync(string.Format("當(dāng)前接收時(shí)間{0}", DateTime.Now.ToString("HH:mm:ss:fff")));
        await foreach (var resp in regFace.ResponseStream.ReadAllAsync())
        {
            WriteReceiveMsgAsync($"姓名:{resp.PersonName},相似度:{resp.ConfidenceLevel}");
        }
    });
    // Writer side: push one request per detected face.
    WriteSendMsgAsync(string.Format("開始發(fā)送時(shí)間{0}", DateTime.Now.ToString("HH:mm:ss:fff")));
    for (int index = 0; index < markFaceInfors.Count; index++)
    {
        WriteSendMsgAsync($"發(fā)送編號(hào):{index}");
        await regFace.RequestStream.WriteAsync(new FaceRequest()
        {
            FaceFeature = Google.Protobuf.ByteString.CopyFrom(markFaceInfors[index].faceFeatureData)
        });
    }
    // Signal end-of-stream, then wait until all replies have been read.
    await regFace.RequestStream.CompleteAsync();
    await regFaceResponseTask;
    _complete = true;
}
- 服務(wù)端多人特征判斷與返回
/// <summary>
/// Bidirectional-streaming gRPC handler: for each incoming face feature,
/// compares it against the in-memory face database and streams back the best
/// match (or an error / not-found reply).
/// </summary>
public override async Task RecongnizationByFace(IAsyncStreamReader<FaceRequest> requestStream, IServerStreamWriter<FaceReply> responseStream, ServerCallContext context)
{
    // FIX: removed the unused faceQueue local.
    IntPtr engine = FaceProcess.GetEngine(FaceProcess.FaceEnginePoor);
    while (await requestStream.MoveNext())
    {
        byte[] featureByte = requestStream.Current.FaceFeature.ToByteArray();
        // An ArcSoft v3 feature blob is a fixed 1032 bytes; skip malformed requests.
        if (featureByte.Length != 1032)
        {
            continue;
        }
        // Wait for an engine from the pool.
        // FIX: was Task.Delay(10).Wait() — blocking inside an async method.
        while (engine == IntPtr.Zero)
        {
            await Task.Delay(10);
            // NOTE(review): the initial acquire uses FaceEnginePoor but this
            // retry uses IDEnginePoor — looks unintentional; confirm which
            // pool is meant (the engine is returned to FaceEnginePoor below).
            engine = FaceProcess.GetEngine(FaceProcess.IDEnginePoor);
        }
        // FIX: fresh reply per request so no state leaks between stream items.
        FaceReply faceReply = new FaceReply();
        IntPtr featurePoint = Arcsoft_Face_Action.PutFeatureByteIntoFeatureIntPtr(featureByte);
        try
        {
            float maxScore = 0f;
            foreach (var f in StaticDataForTestUse.dbFaceInfor)
            {
                float result = 0;
                int compareStatus = Arcsoft_Face_3_0.ASFFaceFeatureCompare(engine, featurePoint, f.Key, ref result, 1);
                if (compareStatus == 0)
                {
                    // Track the best score seen so far; only record the person
                    // when the score clears the threshold AND is the new best.
                    if (result >= maxScore)
                    {
                        maxScore = result;
                    }
                    if (result >= _faceMix && result >= maxScore)
                    {
                        faceReply.PersonName = f.Value;
                        faceReply.ConfidenceLevel = result;
                    }
                }
                else
                {
                    faceReply.PersonName = $"對(duì)比異常 error code={compareStatus}";
                    faceReply.ConfidenceLevel = result;
                }
            }
            if (maxScore < _faceMix)
            {
                faceReply.PersonName = $"未找到匹配者";
                faceReply.ConfidenceLevel = maxScore;
            }
        }
        finally
        {
            // FIX: free the unmanaged feature buffer even if a compare throws
            // (the original freed it outside any finally and could leak).
            Marshal.FreeHGlobal(featurePoint);
        }
        await responseStream.WriteAsync(faceReply);
    }
    // Hand the engine back to the pool when the client completes the stream.
    FaceProcess.PutEngine(FaceProcess.FaceEnginePoor, engine);
}
六、效果演示
-
客戶端:
r1.png
-
服務(wù)端:
2.png
七、源代碼與使用
源代碼公布在GitHub上
https://github.com/18628271760/MultipleFacesProcess
具體操作:
詳見 項(xiàng)目 ReadMe.docx(圖文并茂哦!)