1. First download Hikvision's MVS client. After installation, the install directory already contains the corresponding SDK files, including the header files and the lib library files.
2. Configure the library and header paths in the .pro project file:
INCLUDEPATH += $$PWD/Hik_Includes
DEPENDPATH += $$PWD/Hik_Includes
LIBS += -L$$PWD/Libs/ -lMvCameraControl
You can also configure OpenCV, or snap7 communication with a PLC, if needed (a short snap7 usage sketch follows the configuration below):
win32:CONFIG(release, debug|release): LIBS += -LD:/MY_Software/opencv4.5.1/opencv4.5.1/opencv/build/x64/vc14/lib/ -lopencv_world451
else:win32:CONFIG(debug, debug|release): LIBS += -LD:/MY_Software/opencv4.5.1/opencv4.5.1/opencv/build/x64/vc14/lib/ -lopencv_world451d
INCLUDEPATH += D:/MY_Software/opencv4.5.1/opencv4.5.1/opencv/build/include
DEPENDPATH += D:/MY_Software/opencv4.5.1/opencv4.5.1/opencv/build/include
win32:CONFIG(release, debug|release): LIBS += -L$$PWD/./release/ -lsnap7
else:win32:CONFIG(debug, debug|release): LIBS += -L$$PWD/./debug/ -lsnap7
else:unix: LIBS += -L$$PWD/./ -lsnap7
INCLUDEPATH += $$PWD
DEPENDPATH += $$PWD
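If you do use snap7, the PLC client only needs to be created and connected once, for example in the Widget constructor. A minimal sketch, assuming snap7client is the TS7Client* member that the destructor later releases; the IP address, rack and slot (192.168.0.10, 0, 1) are placeholders for your own PLC:
#include "snap7.h"
#include <iostream>

// Create and connect the snap7 client once at startup.
// The IP address, rack and slot below are placeholders for your own PLC.
snap7client = new TS7Client();
int res = snap7client->ConnectTo("192.168.0.10", 0, 1);
if (res != 0)
{
    // A non-zero result means the connection failed; keep the pointer so it
    // can still be deleted safely in the destructor.
    std::cout << "snap7 connect failed, error code: " << res << std::endl;
}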
3. Connecting the cameras to the PC
My cameras are connected to the PC through a USB-to-Ethernet adapter, so the IP of that PC network port has to be changed; the camera IPs can simply be changed in the MVS client. For changing the PC IP you can refer to my earlier CSDN post 基于Qt和C++的海康网络摄像头SDK开发,详细步骤 (海康威视摄像头sdk).
To make the cameras easier to connect to, I also renamed the two of them to yzw123 and ocr456. You can preview the camera image in MVS first; when debugging the program in Qt, remember to close MVS, otherwise it keeps the camera occupied and the Qt program cannot open it.
4. I wrote a CameraInterface base class and a HikvisionSDK implementation to connect to the cameras and grab frames. The code is adapted from someone else's work.
camerainterface.h
#ifndef CAMERAINTERFACE_H
#define CAMERAINTERFACE_H
#include"opencv2/opencv.hpp"
#include"iostream"
class CameraInterface
{
public:
CameraInterface();
~CameraInterface();
// Connect to the device
virtual int connectCamera(std::string id)=0;
// Set trigger mode on/off
virtual int setTriggerMode(unsigned int TriggerModeNum)=0;
// Start grabbing
virtual int startCamera()=0;
// Stop grabbing
virtual int stopCamera()=0;
// Close the camera
virtual int closeCamera()=0;
// Software trigger
virtual int softTrigger()=0;
// Read one frame into an OpenCV Mat
virtual int ReadBuffer(cv::Mat &image)=0;
// Set image ROI height
virtual int setHeight(unsigned int height)=0;
// Set image ROI width
virtual int setWidth(unsigned int width)=0;
// Get image height
virtual int getHeight()=0;
// Get image width
virtual int getWidth()=0;
// Get exposure time
virtual float getExposureTime()=0;
// Set horizontal offset OffsetX
virtual int setOffsetX(unsigned int offsetX)=0;
// Set vertical offset OffsetY
virtual int setOffsetY(unsigned int offsetY)=0;
// Set trigger source
virtual int setTriggerSource(unsigned int TriggerSourceNum)=0;
// Enable/disable frame rate control
virtual int setFrameRateEnable(bool comm)=0;
// Set heartbeat timeout
virtual int setHeartBeatTime(unsigned int time)=0;
// Set exposure time
virtual int setExposureTime(float ExposureTimeNum)=0;
// Enable/disable auto exposure
virtual int setExposureAuto(bool exposureAutoFlag)=0;
// Enable/disable auto gain
virtual int setGainAuto(bool gainAutoFlag)=0;
// Clear the camera buffer
virtual void clearBuffer()=0;
};
#endif // CAMERAINTERFACE_H
camerainterface.cpp
#include "camerainterface.h"
CameraInterface::CameraInterface()
{
}
CameraInterface::~CameraInterface()
{
}
hikvisionsdk.h
#ifndef HIKVISIONSDK_H
#define HIKVISIONSDK_H
#include"camerainterface.h"
#include"MvCameraControl.h"
using namespace cv;
class HikvisionSDK:public CameraInterface
{
public:
HikvisionSDK();
~HikvisionSDK();
// Connect to the device
int connectCamera(std::string id);
// Set trigger mode on/off
int setTriggerMode(unsigned int TriggerModeNum);
// Start grabbing
int startCamera();
// Stop grabbing
int stopCamera();
// Close the camera
int closeCamera();
// Software trigger
int softTrigger();
// Read one frame into an OpenCV Mat
int ReadBuffer(cv::Mat &image);
// Set image ROI height
int setHeight(unsigned int height);
// Set image ROI width
int setWidth(unsigned int width);
// Get image height
int getHeight();
// Get image width
int getWidth();
// Get exposure time
float getExposureTime();
// Set horizontal offset OffsetX
int setOffsetX(unsigned int offsetX);
// Set vertical offset OffsetY
int setOffsetY(unsigned int offsetY);
// Set trigger source
int setTriggerSource(unsigned int TriggerSourceNum);
// Enable/disable frame rate control
int setFrameRateEnable(bool comm);
// Set heartbeat timeout
int setHeartBeatTime(unsigned int time);
// Set exposure time
int setExposureTime(float ExposureTimeNum);
// Enable/disable auto exposure
int setExposureAuto(bool exposureAutoFlag);
// Enable/disable auto gain
int setGainAuto(bool gainAutoFlag);
// Clear the camera buffer
void clearBuffer();
// // Get the camera IP address
// std::string getCameraIpAddress();
private:
void* m_hDevHandle;
public:
// Buffer size used when saving/converting images
unsigned int m_nBufSizeForSaveImage;
// Buffer size used when fetching images from the driver
unsigned int m_nBufSizeForDriver;
};
#endif // HIKVISIONSDK_H
hikvisionsdk.cpp
#include "hikvisionsdk.h"
#include <MvCameraControl.h>
MV_CC_DEVICE_INFO_LIST m_stDevList; // device information list, stores the enumerated devices
MV_CC_DEVICE_INFO* m_Device=NULL; // the selected device
HikvisionSDK::HikvisionSDK()
{
m_hDevHandle = NULL;
}
HikvisionSDK::~HikvisionSDK()
{
if (m_hDevHandle)
{
MV_CC_DestroyHandle(m_hDevHandle);
m_hDevHandle = NULL;
}
}
// Connect to the camera
int HikvisionSDK::connectCamera(std::string id)
{
// Enumerate devices; the SDK returns 0 on success, anything else is an error
int tempValue=MV_CC_EnumDevices(MV_GIGE_DEVICE | MV_USB_DEVICE, &m_stDevList);
if(tempValue!=0)
return -1;
if(m_stDevList.nDeviceNum==0)
// No camera was found at all
return 2;
for (unsigned int i = 0; i < m_stDevList.nDeviceNum; i++)
{
MV_CC_DEVICE_INFO* pDeviceInfo = m_stDevList.pDeviceInfo[i];
if (NULL == pDeviceInfo)
{
continue;
}
if(pDeviceInfo->nTLayerType==MV_USB_DEVICE)
{
// Match by serial number or user-defined name
if(id== (char*)pDeviceInfo->SpecialInfo.stUsb3VInfo.chSerialNumber||id== (char*)pDeviceInfo->SpecialInfo.stUsb3VInfo.chUserDefinedName||id=="PECVD")
{
m_Device= m_stDevList.pDeviceInfo[i];
break;
}
}
if(pDeviceInfo->nTLayerType==MV_GIGE_DEVICE)
{
if(id== (char*)pDeviceInfo->SpecialInfo.stGigEInfo.chUserDefinedName||id== (char*)pDeviceInfo->SpecialInfo.stGigEInfo.chSerialNumber||id=="PECVD")
{
m_Device= m_stDevList.pDeviceInfo[i];
break;
}
}
}
if(m_Device==NULL)
{
// No camera matching the given name/serial was found
return 3;
}
tempValue = MV_CC_CreateHandle(&m_hDevHandle, m_Device);
if(tempValue!=0)
return -1;
tempValue = MV_CC_OpenDevice(m_hDevHandle);
if (tempValue!=0)
{
MV_CC_DestroyHandle(m_hDevHandle);
m_hDevHandle = NULL;
return -1;
}
if(m_Device->nTLayerType == MV_USB_DEVICE)
{
// Limit the number of internal image nodes for USB cameras
MV_CC_SetImageNodeNum(m_hDevHandle, 2);
}
setTriggerMode(1);
return 0;
}
// Start grabbing
int HikvisionSDK::startCamera()
{
int tempValue=MV_CC_StartGrabbing(m_hDevHandle);
if(tempValue!=0)
{
return -1;
}else
{
return 0;
}
}
// Stop grabbing
int HikvisionSDK::stopCamera()
{
int tempValue=MV_CC_StopGrabbing(m_hDevHandle);
if(tempValue!=0)
{
return -1;
}else
{
return 0;
}
}
// Close the camera
int HikvisionSDK::closeCamera()
{
if (NULL == m_hDevHandle)
{
return -1;
}
MV_CC_CloseDevice(m_hDevHandle);
int tempValue = MV_CC_DestroyHandle(m_hDevHandle);
m_hDevHandle = NULL;
if(tempValue!=0)
{
return -1;
}else
{
return 0;
}
}
// Send a software trigger
int HikvisionSDK::softTrigger()
{
int tempValue= MV_CC_SetCommandValue(m_hDevHandle, "TriggerSoftware");
if(tempValue!=0)
{
return -1;
}else
{
return 0;
}
}
// Read one frame from the camera
int HikvisionSDK::ReadBuffer(Mat &image)
{
unsigned int nRecvBufSize = 0;
MVCC_INTVALUE stParam;
memset(&stParam, 0, sizeof(MVCC_INTVALUE));
int tempValue = MV_CC_GetIntValue(m_hDevHandle, "PayloadSize", &stParam);
if (tempValue != 0)
{
return -1;
}
nRecvBufSize = stParam.nCurValue;
unsigned char* pData = (unsigned char *)malloc(nRecvBufSize);
if (NULL == pData)
{
return -1;
}
MV_FRAME_OUT_INFO_EX stImageInfo = {0};
tempValue= MV_CC_GetOneFrameTimeout(m_hDevHandle, pData, nRecvBufSize, &stImageInfo, 700);
if(tempValue!=0)
{
free(pData);
return -1;
}
m_nBufSizeForSaveImage = stImageInfo.nWidth * stImageInfo.nHeight * 3 + 2048;
unsigned char* m_pBufForSaveImage = (unsigned char*)malloc(m_nBufSizeForSaveImage);
if (NULL == m_pBufForSaveImage)
{
free(pData);
return -1;
}
bool isMono;
switch (stImageInfo.enPixelType)
{
case PixelType_Gvsp_Mono8:
case PixelType_Gvsp_Mono10:
case PixelType_Gvsp_Mono10_Packed:
case PixelType_Gvsp_Mono12:
case PixelType_Gvsp_Mono12_Packed:
isMono=true;
break;
default:
isMono=false;
break;
}
Mat getImage;
if(isMono)
{
// Wrap the raw mono buffer directly (strictly correct only for Mono8)
getImage=Mat(stImageInfo.nHeight,stImageInfo.nWidth,CV_8UC1,pData);
//imwrite("d:\\测试opencv_Mono.tif", image);
}
else
{
// Convert the frame to BGR8, the layout OpenCV expects
MV_CC_PIXEL_CONVERT_PARAM stConvertParam = {0};
memset(&stConvertParam, 0, sizeof(MV_CC_PIXEL_CONVERT_PARAM));
stConvertParam.nWidth = stImageInfo.nWidth; // image width
stConvertParam.nHeight = stImageInfo.nHeight; // image height
stConvertParam.pSrcData = pData; // input data buffer
stConvertParam.nSrcDataLen = stImageInfo.nFrameLen; // input data size
stConvertParam.enSrcPixelType = stImageInfo.enPixelType; // input pixel format
stConvertParam.enDstPixelType = PixelType_Gvsp_BGR8_Packed; // output pixel format suitable for OpenCV
stConvertParam.pDstBuffer = m_pBufForSaveImage; // output data buffer
stConvertParam.nDstBufferSize = m_nBufSizeForSaveImage; // output buffer size
MV_CC_ConvertPixelType(m_hDevHandle, &stConvertParam);
getImage=Mat(stImageInfo.nHeight,stImageInfo.nWidth,CV_8UC3,m_pBufForSaveImage);
//imwrite("d:\\测试opencv_Color.tif", image);
}
// Deep-copy into the caller's Mat before freeing the raw buffers
getImage.copyTo(image);
free(pData);
free(m_pBufForSaveImage);
return 0;
}
// Get the image height
int HikvisionSDK::getHeight()
{
MVCC_INTVALUE stParam;
memset(&stParam, 0, sizeof(MVCC_INTVALUE));
int tempValue=MV_CC_GetIntValue(m_hDevHandle, "Height", &stParam);
int value= stParam.nCurValue;
if(tempValue!=0)
{
return -1;
}else
{
return value;
}
}
// Get the image width
int HikvisionSDK::getWidth()
{
MVCC_INTVALUE stParam;
memset(&stParam, 0, sizeof(MVCC_INTVALUE));
int tempValue=MV_CC_GetIntValue(m_hDevHandle, "Width", &stParam);
int value= stParam.nCurValue;
if(tempValue!=0)
{
return -1;
}else
{
return value;
}
}
// Get the camera exposure time
float HikvisionSDK::getExposureTime()
{
MVCC_FLOATVALUE stParam;
memset(&stParam, 0, sizeof(MVCC_FLOATVALUE));
int tempValue=MV_CC_GetFloatValue(m_hDevHandle, "ExposureTime", &stParam);
float value= stParam.fCurValue;
if(tempValue!=0)
{
return -1;
}else
{
return value;
}
}
// Set image ROI height
int HikvisionSDK::setHeight(unsigned int height)
{
int tempValue=MV_CC_SetIntValue(m_hDevHandle, "Height", height);
if(tempValue!=0)
{
return -1;
}
else
{
return 0;
}
}
// Set image ROI width
int HikvisionSDK::setWidth(unsigned int width)
{
int tempValue=MV_CC_SetIntValue(m_hDevHandle, "Width", width);
if(tempValue!=0)
{
return -1;
}
else
{
return 0;
}
}
// Set horizontal offset OffsetX
int HikvisionSDK::setOffsetX(unsigned int offsetX)
{
int tempValue=MV_CC_SetIntValue(m_hDevHandle, "OffsetX", offsetX);
if(tempValue!=0)
{
return -1;
}
else
{
return 0;
}
}
// Set vertical offset OffsetY
int HikvisionSDK::setOffsetY(unsigned int offsetY)
{
int tempValue=MV_CC_SetIntValue(m_hDevHandle, "OffsetY", offsetY);
if(tempValue!=0)
{
return -1;
}
else
{
return 0;
}
}
// Set trigger mode on/off
int HikvisionSDK::setTriggerMode(unsigned int TriggerModeNum)
{
//0:Off 1:On
int tempValue= MV_CC_SetEnumValue(m_hDevHandle,"TriggerMode", TriggerModeNum);
if(tempValue!=0)
{
return -1;
}else
{
return 0;
}
}
// Set the trigger source
int HikvisionSDK::setTriggerSource(unsigned int TriggerSourceNum)
{
//0:Line0 1:Line1 7:Software
int tempValue= MV_CC_SetEnumValue(m_hDevHandle,"TriggerSource", TriggerSourceNum);
if(tempValue!=0)
{
return -1;
}else
{
return 0;
}
}
// Enable/disable frame rate control
int HikvisionSDK::setFrameRateEnable(bool comm)
{
int tempValue =MV_CC_SetBoolValue(m_hDevHandle, "AcquisitionFrameRateEnable", comm);
if (tempValue != 0)
{
return -1;
}
else
{
return 0;
}
}
// Set the heartbeat timeout
int HikvisionSDK::setHeartBeatTime(unsigned int time)
{
// The minimum heartbeat timeout is 500 ms
if(time<500)
time=500;
int tempValue=MV_CC_SetIntValue(m_hDevHandle, "GevHeartbeatTimeout", time);
if(tempValue!=0)
{
return -1;
}
else
{
return 0;
}
}
// Set the exposure time
int HikvisionSDK::setExposureTime(float ExposureTimeNum)
{
int tempValue= MV_CC_SetFloatValue(m_hDevHandle, "ExposureTime",ExposureTimeNum );
if(tempValue!=0)
{
return -1;
}
else
{
return 0;
}
}
// Enable/disable auto exposure
int HikvisionSDK::setExposureAuto(bool exposureAutoFlag)
{
int tempValue= MV_CC_SetEnumValue(m_hDevHandle,"ExposureAuto", exposureAutoFlag);
if (tempValue != 0)
{
return -1;
}
else
{
return 0;
}
}
// Enable/disable auto gain
int HikvisionSDK::setGainAuto(bool gainAutoFlag)
{
int tempValue= MV_CC_SetEnumValue(m_hDevHandle,"GainAuto", gainAutoFlag);
if (tempValue != 0)
{
return -1;
}
else
{
return 0;
}
}
// Clear the camera buffer
void HikvisionSDK::clearBuffer()
{
//stopCamera();
//startCamera();
}
First, initialize the timer in the Widget constructor:
// Initialize the timer
timer5 = new QTimer(this);
connect(timer5, &QTimer::timeout, this, &Widget::captureAndProcess);
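For reference, here is a minimal sketch of the members this code assumes are declared in widget.h. The names are taken from the code in this post; anything not shown in the original (such as the type of the `locat` helper or OCRdisplay's exact signature) is only a placeholder:
#ifndef WIDGET_H
#define WIDGET_H
#include <QWidget>
#include <QTimer>
#include <QLabel>
#include "hikvisionsdk.h"   // brings in cv::Mat via camerainterface.h
#include "snap7.h"          // only needed if the PLC client is used

QT_BEGIN_NAMESPACE
namespace Ui { class Widget; }
QT_END_NAMESPACE

class Widget : public QWidget
{
    Q_OBJECT
public:
    explicit Widget(QWidget *parent = nullptr);
    ~Widget();
private slots:
    void captureAndProcess();                    // driven by timer5
private:
    void display(const cv::Mat *imagePtr, QLabel *targetLabel);
    void OCRdisplay(cv::Mat *imagePtr);          // signature assumed from the call site
    Ui::Widget *ui;
    QTimer *timer5 = nullptr;                    // grab/processing timer
    HikvisionSDK *camera = nullptr;              // first camera ("yzw123")
    HikvisionSDK *cameraOCR = nullptr;           // second camera ("ocr456")
    TS7Client *snap7client = nullptr;            // optional snap7 PLC client
    // ...plus the image-processing helper `locat` used in display() (type not shown in the original)
};
#endif // WIDGET_H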
I do the connection inside a button slot in widget.cpp, using the modified camera name; the second camera is handled the same way (see the sketch after this code).
// Connect the camera
if(camera==nullptr)
{
camera = new HikvisionSDK();
// Connect to the camera by its user-defined name
int connectResult = camera->connectCamera("yzw123");
std::cout << "Connect: " << connectResult << std::endl;
if (connectResult != 0)
{
// Connection failed: show red
ui->label_computer->setStyleSheet("background-color: red;");
ui->label_computer->setText("Disconnected");
ui->label_computer->setAlignment(Qt::AlignHCenter | Qt::AlignVCenter);
delete camera;
camera = nullptr;
return;
}
// Enable trigger mode
std::cout << "TriggerMode: " << camera->setTriggerMode(1) << std::endl;
// Use software trigger as the trigger source
std::cout << "TriggerSource: " << camera->setTriggerSource(7) << std::endl;
// Set the exposure time
std::cout << "SetExposureTime: " << camera->setExposureTime(40000) << std::endl;
// Start grabbing
int startResult = camera->startCamera();
std::cout << "Start_LocatCamera: " << startResult << std::endl;
if (startResult != 0)
{
// Starting acquisition failed: show red
ui->label_computer->setStyleSheet("background-color: red;");
ui->label_computer->setText("Capture Fail");
ui->label_computer->setAlignment(Qt::AlignHCenter | Qt::AlignVCenter);
camera->closeCamera();
delete camera;
camera = nullptr;
return;
}
// Connected and grabbing: show green
ui->label_computer->setStyleSheet("background-color: green;");
ui->label_computer->setText("Connected");
ui->label_computer->setAlignment(Qt::AlignHCenter | Qt::AlignVCenter);
// Start the timer with a 50 ms interval
timer5->start(50);
}
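The second camera follows the same pattern; a minimal sketch of what that might look like, only the name ocr456 and the status label differ (ui->label_camera2 is a placeholder, since the original post only shows the first camera's slot):
// Connect the second (OCR) camera in the same way; label_camera2 is a placeholder name
if (cameraOCR == nullptr)
{
    cameraOCR = new HikvisionSDK();
    int connectResult = cameraOCR->connectCamera("ocr456");
    if (connectResult != 0)
    {
        ui->label_camera2->setStyleSheet("background-color: red;");
        delete cameraOCR;
        cameraOCR = nullptr;
        return;
    }
    cameraOCR->setTriggerMode(1);
    cameraOCR->setTriggerSource(7);      // software trigger
    cameraOCR->setExposureTime(40000);
    if (cameraOCR->startCamera() != 0)
    {
        ui->label_camera2->setStyleSheet("background-color: red;");
        cameraOCR->closeCamera();
        delete cameraOCR;
        cameraOCR = nullptr;
        return;
    }
    ui->label_camera2->setStyleSheet("background-color: green;");
    timer5->start(50);   // the shared timer also drives the second camera
}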
The timer drives the soft trigger and frame handling, which then calls display:
void Widget::captureAndProcess()
{
// Handle the first camera
if (camera != nullptr)
{
Mat* image = new Mat();
if (camera->softTrigger() == 0 && camera->ReadBuffer(*image) == 0)
{
display(image, ui->label1); // show on label1
}
delete image;
}
// Handle the second (OCR) camera
if (cameraOCR != nullptr)
{
Mat* imageOCR = new Mat();
if (cameraOCR->softTrigger() == 0 && cameraOCR->ReadBuffer(*imageOCR) == 0)
{
OCRdisplay(imageOCR); // show on label3 and label4
}
delete imageOCR;
}
}
In void Widget::display(const Mat* imagePtr, QLabel* targetLabel), the captured image is fairly dark, so it is enhanced before being shown instead of being displayed as-is. The raw image is also saved to a temporary file, which can then be used for further processing such as edge detection or perspective transformation.
std::cout << "so the camera" << std::endl;
// 调用增强对比度和亮度函数
cv::Mat enhancedImage = locat.enhanceContrastAndBrightness(*imagePtr);
// +++ 新增:保存增强后的图像到 locatimg 文件夹 +++
QString locatDir = QCoreApplication::applicationDirPath() + "/locatimg/";
QDir().mkpath(locatDir); // 创建目录
QString timestamp = QDateTime::currentDateTime().toString("yyyy.MM.dd.hh.mm");
QString locatimgfile = locatDir + "locat_" + timestamp + ".jpg";
cv::imwrite(locatimgfile.toStdString(), enhancedImage); // 保存增强后的图像
// 显示增强图像到 label1
QImage* QmyImage = new QImage();
if (enhancedImage.channels() > 1)
{
*QmyImage = QImage(enhancedImage.data, enhancedImage.cols, enhancedImage.rows, enhancedImage.step, QImage::Format_RGB888).rgbSwapped();
}
else
{
*QmyImage = QImage(enhancedImage.data, enhancedImage.cols, enhancedImage.rows, enhancedImage.step, QImage::Format_Grayscale8);
}
*QmyImage = QmyImage->scaled(targetLabel->size(), Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
targetLabel->setPixmap(QPixmap::fromImage(*QmyImage));
delete QmyImage;
// 保存原始图像到临时文件
std::string inputPath = "temp_input.jpg";
cv::imwrite(inputPath, *imagePtr);
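The enhanceContrastAndBrightness helper itself is not shown above. A minimal sketch of what it could look like, assuming a simple linear gain/offset adjustment (written here as a free function rather than a member of locat's class, and with alpha/beta values that are placeholders you would tune for your lighting):
// A possible implementation: linear contrast/brightness adjustment.
// alpha > 1 increases contrast, beta > 0 lifts brightness; both values are placeholders.
cv::Mat enhanceContrastAndBrightness(const cv::Mat &src, double alpha = 1.5, double beta = 30.0)
{
    cv::Mat dst;
    // dst = saturate_cast<uchar>(alpha * src + beta), applied per channel
    src.convertTo(dst, -1, alpha, beta);
    return dst;
}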
Finally, release the resources in the destructor:
Widget::~Widget()
{
delete ui;
if (timer5 != nullptr)
{
timer5->stop();
delete timer5;
}
if (camera != nullptr)
{
camera->closeCamera();
delete camera;
}
// Release the second camera as well
if (cameraOCR != nullptr)
{
cameraOCR->closeCamera();
delete cameraOCR;
}
// Release the snap7 PLC client
if (snap7client != nullptr)
{
snap7client->Disconnect(); // optional: make sure the connection is closed
delete snap7client;
}
}
The resulting live view looks like this: