// C# video-surveillance series (7): server side — wrapping the API (DS40xxSDK.dll), part 1.
// Originally published 2009-04-08 on WEB开发网.
// The wrappers below were transcribed from the Hikvision board SDK manuals
// ("Hikvision 板卡网络开发包编程手册 V4.7" and "DS-4000HC/HCS/HC+/HF/HS/MD 卡
// Windows 编程指南 V4.3") and checked against the vendor's VC++ sample code.
// They cover the P/Invoke surface of HikServer.dll and DS40xxSDK.dll.
using System;
using System.Collections.Generic;
using System.Text;
using System.Runtime.InteropServices;
using System.Drawing;
namespace HikServer.DS40xxSDK
{
#region enum
/// <summary>
/// Capture board type, as reported by the DS40xx SDK.
/// </summary>
public enum BOARD_TYPE_DS : uint
{
    DS400XM = 0, // M card
    DS400XH = 1, // H card
    DS4004HC = 2, // 4004HC
    DS4008HC = 3, // 4008HC
    DS4016HC = 4, // 4016HC
    DS4001HF = 5, // 4001HF
    DS4004HF = 6, // 4004HF
    DS4002MD = 7, // 4002MD
    DS4004MD = 8, // 4004MD
    DS4016HCS = 9, // 4016HCS
    DS4002HT = 10, // 4002HT
    DS4004HT = 11, // 4004HT
    DS4008HT = 12, // 4008HT
    DS4004HC_PLUS = 13, // 4004HC+
    DS4008HC_PLUS = 14, // 4008HC+
    DS4016HC_PLUS = 15, // 4016HC+
    DS4008HF = 16, // 4008HF
    DS4008MD = 17, // 4008MD
    DS4008HS = 18, // 4008HS
    DS4016HS = 19, // 4016HS
    // Sentinel for "no/unknown board"; requires the uint underlying type above.
    INVALID_BOARD_TYPE = 0xffffffff,
}
/// <summary>
/// Video preview (raw picture) formats. Values are single-bit flags.
/// </summary>
public enum TypeVideoFormat
{
    vdfRGB8A_233 = 0x00000001,
    vdfRGB8R_332 = 0x00000002,
    vdfRGB15Alpha = 0x00000004,
    /// <summary>
    /// 16-bit RGB format.
    /// </summary>
    vdfRGB16 = 0x00000008,
    /// <summary>
    /// 24-bit RGB format.
    /// </summary>
    vdfRGB24 = 0x00000010,
    vdfRGB24Alpha = 0x00000020,
    vdfYUV420Planar = 0x00000040,
    /// <summary>
    /// Planar YUV 4:2:2 format.
    /// </summary>
    vdfYUV422Planar = 0x00000080,
    vdfYUV411Planar = 0x00000100,
    vdfYUV420Interspersed = 0x00000200,
    vdfYUV422Interspersed = 0x00000400,
    vdfYUV411Interspersed = 0x00000800,
    vdfYUV422Sequence = 0x00001000, /* U0, Y0, V0, Y1: For VO overlay */
    vdfYUV422SequenceAlpha = 0x00002000,
    /* U0, Y0, V0, Y1: For VO overlay, with low bit for alpha blending */
    vdfMono = 0x00004000, /* 8 bit monochrome */
    vdfYUV444Planar = 0x00008000,
};
/// <summary>
/// Video standard of the input signal. Values are single-bit flags;
/// the uint underlying type is required for StandardNone's high bit.
/// </summary>
public enum VideoStandard_t : uint
{
    /// <summary>
    /// No video signal present.
    /// </summary>
    StandardNone = 0x80000000,
    /// <summary>
    /// NTSC standard.
    /// </summary>
    StandardNTSC = 0x00000001,
    /// <summary>
    /// PAL standard.
    /// </summary>
    StandardPAL = 0x00000002,
    StandardSECAM = 0x00000004,
} ;
/// <summary>
/// Encoded picture resolution (CIF family).
/// </summary>
public enum PictureFormat_t
{
    ENC_CIF_FORMAT = 0,      // CIF
    ENC_QCIF_FORMAT = 1,     // QCIF (quarter CIF)
    ENC_2CIF_FORMAT = 2,     // 2CIF (half D1)
    ENC_4CIF_FORMAT = 3,     // 4CIF (D1)
    ENC_QQCIF_FORMAT = 4,    // QQCIF
    ENC_CIFQCIF_FORMAT = 5,  // combined CIF/QCIF mode
    ENC_CIFQQCIF_FORMAT = 6, // combined CIF/QQCIF mode
    ENC_DCIF_FORMAT = 7      // DCIF
};
/// <summary>
/// Bitrate control mode for the encoder.
/// </summary>
public enum BitrateControlType_t
{
    /// <summary>
    /// Constant bitrate (CBR). Note: the original Chinese comments for
    /// brCBR/brVBR were swapped; CBR is constant, VBR is variable.
    /// </summary>
    brCBR = 0,
    /// <summary>
    /// Variable bitrate (VBR).
    /// </summary>
    brVBR = 1,
};
/// <summary>
/// Frame/packet type flags delivered to the stream callbacks.
/// "Sub" members refer to the sub (secondary) stream.
/// </summary>
public enum FrameType_t
{
    PktError = 0,
    PktIFrames = 0x0001,
    PktPFrames = 0x0002,
    PktBBPFrames = 0x0004,
    PktAudioFrames = 0x0008,
    PktMotionDetection = 0x00010,
    PktDspStatus = 0x00020,
    PktOrigImage = 0x00040,
    PktSysHeader = 0x00080,
    PktBPFrames = 0x00100,
    PktSFrames = 0x00200,
    PktSubIFrames = 0x00400,
    PktSubPFrames = 0x00800,
    PktSubBBPFrames = 0x01000,
    PktSubSysHeader = 0x02000
};
#endregion
#region struct
/// <summary>
/// Board information structure (managed mirror of the native DS_BOARD_DETAIL).
/// Fields are public so callers can read the values the SDK fills in
/// (they were implicitly private, making the struct unusable). The explicit
/// sequential layout makes the interop contract explicit.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct DS_BOARD_DETAIL
{
    /// <summary>Board type.</summary>
    public BOARD_TYPE_DS type;
    /// <summary>Serial number (native: BYTE sn[16]).</summary>
    [MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)]
    public byte[] sn;
    /// <summary>Number of DSPs on the board.</summary>
    public uint dspCount;
    /// <summary>Index of the first DSP on the board.</summary>
    public uint firstDspIndex;
    /// <summary>Number of encode channels on the board.</summary>
    public uint encodeChannelCount;
    /// <summary>Index of the board's first encode channel.</summary>
    public uint firstEncodeChannelIndex;
    /// <summary>Number of decode channels on the board.</summary>
    public uint decodeChannelCount;
    /// <summary>Index of the board's first decode channel.</summary>
    public uint firstDecodeChannelIndex;
    /// <summary>Number of video output (display) channels on the board.</summary>
    public uint displayChannelCount;
    /// <summary>Index of the board's first display channel.</summary>
    public uint firstDisplayChannelIndex;
    public uint reserved1;
    public uint reserved2;
    public uint reserved3;
    /// <summary>
    /// Hardware version, format major.minor.build:
    /// major = bits 16-19, minor = bits 8-15, build = bits 0-7.
    /// </summary>
    public uint version;
}
/// <summary>
/// Per-DSP information structure (managed mirror of the native DSP_DETAIL).
/// Fields are public so callers can read the values the SDK fills in
/// (they were implicitly private, making the struct unusable).
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct DSP_DETAIL
{
    /// <summary>Number of encode channels on this DSP.</summary>
    public uint encodeChannelCount;
    /// <summary>Index of this DSP's first encode channel among all encode channels.</summary>
    public uint firstEncodeChannelIndex;
    /// <summary>Number of decode channels on this DSP.</summary>
    public uint decodeChannelCount;
    /// <summary>Index of this DSP's first decode channel among all decode channels.</summary>
    public uint firstDecodeChannelIndex;
    /// <summary>Number of display channels on this DSP.</summary>
    public uint displayChannelCount;
    /// <summary>Index of this DSP's first display channel among all display channels.</summary>
    public uint firstDisplayChannelIndex;
    public uint reserved1;
    public uint reserved2;
    public uint reserved3;
    public uint reserved4;
}
/// <summary>
/// Per-channel capability flags (managed mirror of the native CHANNEL_CAPABILITY).
/// Fields are public so callers can read the values (they were implicitly private).
/// NOTE(review): the byte[] fields have no [MarshalAs(UnmanagedType.ByValArray,
/// SizeConst = N)] attribute, so this struct cannot currently be marshaled to or
/// from native memory. The native array length is not visible here — confirm it
/// against the SDK header and add the attributes before using this in a P/Invoke call.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct CHANNEL_CAPABILITY
{
    /// <summary>Audio preview support flags.</summary>
    public byte[] bAudioPreview;
    /// <summary>Alarm I/O support flags.</summary>
    public byte[] bAlarmIO;
    /// <summary>Watchdog support flags.</summary>
    public byte[] bWatchDog;
}
/// <summary>
/// Version information (managed mirror of the native PVERSION_INFO).
/// Fields are public so callers can read the values the SDK fills in
/// (they were implicitly private).
/// NOTE(review): native version/build numbers are typically 32-bit DWORDs;
/// C# ulong is 64-bit, so the layout may not match the native struct —
/// confirm against the SDK header before marshaling.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct PVERSION_INFO
{
    /// <summary>DSP version and build number (build marks the last modification date for upgrades).</summary>
    public ulong DspVersion, DspBuildNum;
    /// <summary>Driver version and build number.</summary>
    public ulong DriverVersion, DriverBuildNum;
    /// <summary>SDK version and build number.</summary>
    public ulong SDKVersion, SDKBuildNum;
}
/// <summary>
/// Rectangular region inside a display window (native Windows RECT).
/// Left commented out: Win32 RECT fields are 32-bit LONGs, so a C# mirror
/// should use int fields (C# long is 64-bit) or System.Drawing.Rectangle.
/// </summary>
//[StructLayout(LayoutKind.Sequential)]
//public struct RECT
//{
//    public int left;
//    public int top;
//    public int right;
//    public int bottom;
//}
/// <summary>
/// Frame statistics (managed mirror of the native PFRAMES_STATISTICS).
/// Fields are public so callers can read the values the SDK fills in
/// (they were implicitly private).
/// NOTE(review): the native struct most likely uses 32-bit fields; C# ulong
/// is 64-bit, so the layout may not match — confirm against the SDK header.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct PFRAMES_STATISTICS
{
    /// <summary>Video frames processed.</summary>
    public ulong VideoFrames;
    /// <summary>Audio frames processed.</summary>
    public ulong AudioFrames;
    /// <summary>Frames lost.</summary>
    public ulong FramesLost;
    /// <summary>Stream bytes lost to queue overflow.</summary>
    public ulong QueueOverflow;
    /// <summary>
    /// Current bitrate in bps (the original comment said "frame rate",
    /// but the field name and unit indicate bitrate).
    /// </summary>
    public ulong CurBps;
}
/// <summary>
/// Hardware/software version information (managed mirror of the native PHW_VERSION).
/// Fields are public so callers can read the values the SDK fills in
/// (they were implicitly private). Field-for-field identical to PVERSION_INFO.
/// NOTE(review): native version numbers are typically 32-bit DWORDs; C# ulong
/// is 64-bit, so the layout may not match — confirm against the SDK header.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct PHW_VERSION
{
    /// <summary>DSP program version and build number.</summary>
    public ulong DspVersion, DspBuildNum;
    /// <summary>Driver version and build number.</summary>
    public ulong DriverVersion, DriverBuildNum;
    /// <summary>SDK version and build number.</summary>
    public ulong SDKVersion, SDKBuildNum;
}
/// <summary>
/// System time (mirrors the Win32 SYSTEMTIME structure: eight 16-bit fields,
/// 16 bytes total). Fields are public so callers can read values filled in
/// by native code (they were implicitly private).
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct SYSTEMTIME
{
    public ushort wYear;
    /// <summary>Month, 1 = January (Win32 convention).</summary>
    public ushort wMonth;
    /// <summary>Day of week, 0 = Sunday (Win32 convention).</summary>
    public ushort wDayOfWeek;
    public ushort wDay;
    public ushort wHour;
    public ushort wMinute;
    public ushort wSecond;
    public ushort wMilliseconds;
}
#endregion
#region delegate
/// <summary>
/// Raw image stream callback.
///
/// typedef void (*IMAGE_STREAM_CALLBACK)(UINT channelNumber,void *context);
/// </summary>
/// <param name="channelNumber">Channel number.</param>
/// <param name="context">User/device context pointer passed through from registration.</param>
// NOTE(review): no [UnmanagedFunctionPointer] attribute, so this marshals as StdCall,
// while STREAM_DIRECT_READ_CALLBACK below is declared Cdecl — confirm the native convention.
public delegate void IMAGE_STREAM_CALLBACK(uint channelNumber, IntPtr context);
/// <summary>
/// Callback for reading the encoded data stream directly.
///
/// typedef int (*STREAM_DIRECT_READ_CALLBACK)(ULONG channelNumber,void *DataBuf,DWORD Length,int FrameType,void *context);
/// </summary>
/// <param name="channelNumber">Channel number.</param>
/// <param name="DataBuf">Address of the native buffer holding the encoded data.</param>
/// <param name="Length">Buffer length in bytes.</param>
/// <param name="FrameType">Type of the frame data in the buffer.</param>
/// <param name="context">User/device context pointer passed through from registration.</param>
/// <returns>Result code; semantics defined by the SDK manual.</returns>
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
//public delegate int STREAM_DIRECT_READ_CALLBACK(uint channelNumber, byte[] DataBuf, uint Length, FrameType_t FrameType, IntPtr context);
public delegate int STREAM_DIRECT_READ_CALLBACK(int channelNumber, IntPtr DataBuf, int Length, FrameType_t FrameType, IntPtr context);
//public unsafe delegate int STREAM_DIRECT_READ_CALLBACK(uint channelNumber, void * DataBuf, uint Length, int FrameType, IntPtr context);
/// <summary>
/// Callback for reading the stream.
///
/// typedef int (*STREAM_READ_CALLBACK)(ULONG channelNumber, void *context)
/// </summary>
/// <param name="channelNumber">Channel number.</param>
/// <param name="context">User/device context pointer passed through from registration.</param>
/// <returns>Result code; semantics defined by the SDK manual.</returns>
// NOTE(review): native ULONG is 32-bit on Windows but C# ulong is 64-bit — this
// signature likely mismatches the native callback (consider uint). Also no
// [UnmanagedFunctionPointer]; confirm the native calling convention.
public delegate int STREAM_READ_CALLBACK(ulong channelNumber, IntPtr context);
/// <summary>
/// Motion-detection result callback.
///
/// typedef void (*MOTION_DETECTION_CALLBACK)(ULONG channelNumber, BOOL bMotionDetected,void *context)
/// </summary>
/// <param name="channelNumber">Channel number.</param>
/// <param name="bMotionDetected">
/// True when motion is detected inside the channel's configured detection region;
/// set back to False when no motion has occurred in that region for the configured
/// delay (seconds) after the last detection.
/// </param>
/// <param name="context">User/device context pointer passed through from registration.</param>
// NOTE(review): native ULONG is 32-bit on Windows but C# ulong is 64-bit — this
// signature likely mismatches the native callback (consider uint); confirm
// against the SDK header before use.
public delegate void MOTION_DETECTION_CALLBACK(ulong channelNumber, bool bMotionDetected, IntPtr context);
/// <summary>
/// Drawing callback, invoked so the caller can draw onto the offscreen surface.
///
/// #define DRAWFUN(x) void (CALLBACK* x)(long nPort,HDC hDc,LONG nUser)
/// </summary>
/// <param name="nPort">Channel/port number.</param>
/// <param name="HDC">Device context of the offscreen surface (equivalent to the display window's DC).</param>
/// <param name="nUser">User data.</param>
// NOTE(review): native long/LONG are 32-bit on Windows while C# long is 64-bit,
// and CALLBACK means __stdcall — this likely needs int parameters and an explicit
// [UnmanagedFunctionPointer(CallingConvention.StdCall)]; confirm before use.
public delegate void DrawFun(long nPort, IntPtr HDC, long nUser);
/// <summary>
/// Decoder video capture callback.
///
/// typedef void (*DECODER_VIDEO_CAPTURE_CALLBACK)(UINT nChannelNumber, void *DataBuf,UINT width,UINT height,UINT nFrameNum,UINT nFrameTime, SYSTEMTIME *pFrameAbsoluteTime,void *context)
/// </summary>
/// <param name="nChannelNumber">Decode channel handle.</param>
/// <param name="DataBuf">Address of the native buffer holding the captured image.</param>
/// <param name="width">Image width in pixels.</param>
/// <param name="height">Image height in pixels.</param>
/// <param name="nFrameNum">Sequence number of the captured frame.</param>
/// <param name="nFrameTime">Relative timestamp of the captured frame, in milliseconds.</param>
/// <param name="pFrameAbsoluteTime">Absolute timestamp of the captured frame.</param>
/// <param name="context">User/device context pointer passed through from registration.</param>
// NOTE(review): the native signature passes SYSTEMTIME* (a pointer), but this
// declaration takes SYSTEMTIME by value — it likely needs to be `ref SYSTEMTIME`
// or IntPtr to marshal correctly; confirm against the SDK header.
public delegate void DECODER_VIDEO_CAPTURE_CALLBACK(uint nChannelNumber, IntPtr DataBuf, uint width, uint height, uint nFrameNum, uint nFrameTime, SYSTEMTIME pFrameAbsoluteTime, IntPtr context);
/// <summary>
/// Callback invoked when index creation for a file completes.
///
/// typedef void (*FILE_REF_DONE_CALLBACK)(UINT nChannel,UINT nSize)
/// </summary>
/// <param name="nChannel">Channel number.</param>
/// <param name="nSize">Index size (currently unused; reserved for future index export/import support).</param>
public delegate void FILE_REF_DONE_CALLBACK(uint nChannel, uint nSize);
#endregion
/// <summary>
/// DS40xxSDK.dll
/// </summary>
public class HikVisionSDK
{
/// <summary>
/// Status/event message table, indexed by event code (see the trailing
/// index-range comments on each row). These are user-facing Chinese UI
/// strings — runtime values, intentionally left untranslated.
/// </summary>
public static readonly List<string> state = new List<string>(new string[]{
"", "正在打开", "音频信号丢失", "视频信号丢失", "有物体移动", //0-4
"自动分割录像", "开始录像", "停止录像", "启动声音监听", "停止声音监听", //5-9
"启动视频预览", "停止视频预览", "启动录像", "停止录像", "启动视频报警", //10-14
"关闭视频报警", "启动音频报警", "停止音频报警", "启动移动侦测", "停止移动侦测", //15-19
"启动视频遮挡", "关闭视频遮挡", "开始屏幕输出", "停止屏幕输出", "启动视频LOGO", //20-24
"停止视频LOGO", "开始视频OSD", "停止视频OSD", "切换为黑白视频", "切换为彩色视频", //25-29
"切换为黑屏显示", "切换为白屏显示", "视频色彩复位", "启动全屏显示", "采集卡已经加载", //30-34
"采集卡已经卸截", "视频服务启动成功", "视频服务已停止", "静音", "音量恢复", //35-39
"云台控制命令发送", "系统出现未知错误", "录像文件大小", "配置端口号成功", "连接服务端成功", //40-44
"正在连接", "开始接收图象", "异常退出", "接收完毕,退出", "无法联系服务端", //45-49
"服务端拒绝访问", "无效", "停止客户端连接", "图像抓取成功", "初始化服务端网络连接成功", //50-54
"视频服务启动失败", "退出全屏预览", "", "", "" //55-59
});
// APIs that are obsolete or superseded by newer functions:
// GetTotalChannels: use GetEncodeChannelCount instead
// GetTotalDSPs: use GetDspCount instead
// SetupDateTime: no effect since version 4.0
// HW_GetChannelNum: obsolete, use GetBoardDetail
// HW_GetDeviceSerialNo: obsolete, use GetBoardDetail
// HW_SetVideoOutStandard: obsolete, use SetDisplayStandard or SetDefaultVideoStandard
// HW_SetDspDeadlockMsg: obsolete
// HW_ResetDsp: obsolete
// HW_SetDisplayPara: the bToVideoOut member of DISPLAY_PARA is obsolete; the MD
// card's analog video-out feature has been merged into the video matrix.
#region 流类型宏定义
/// <summary>
/// Video-only stream.
/// #define STREAM_TYPE_VIDEO
/// </summary>
private const int STREAM_TYPE_VIDEO = 1;
/// <summary>
/// Audio-only stream.
/// #define STREAM_TYPE_AUDIO
/// </summary>
private const int STREAM_TYPE_AUDIO = 2;
/// <summary>
/// Combined audio/video stream.
/// #define STREAM_TYPE_AVSYNC
/// </summary>
private const int STREAM_TYPE_AVSYNC = 3;
#endregion
// (Web-page boilerplate from the original article — "更多精彩 / 赞助商链接",
// i.e. "more highlights / sponsored links" — extraction residue, not code.)