248 lines
10 KiB
C++
Raw Normal View History

#include "DetectPresenter.h"
#include "workpieceHolePositioning_Export.h"
#include "SG_baseAlgo_Export.h"
#include "AlgoParamConverter.h"
#include "IHandEyeCalib.h"
#include <fstream>
#include <memory>
DetectPresenter::DetectPresenter(/* args */)
{
    // Record the linked algorithm library version at construction time
    // so every session log carries it.
    const char* algoVersion = wd_workpieceHolePositioningVersion();
    LOG_DEBUG("DetectPresenter Init algo ver: %s\n", algoVersion);
}
// No owned resources beyond what members release themselves.
DetectPresenter::~DetectPresenter() = default;
QString DetectPresenter::GetAlgoVersion()
{
    // Ask the algorithm library for its version string.
    // A null pointer from the C API maps to an empty QString.
    if (const char* version = wd_workpieceHolePositioningVersion()) {
        return QString::fromUtf8(version);
    }
    return QString();
}
int DetectPresenter::DetectWorkpieceHole(
int cameraIndex,
std::vector<std::pair<EVzResultDataType, SVzLaserLineData>>& laserLines,
const VrAlgorithmParams& algorithmParams,
const VrDebugParam& debugParam,
LaserDataLoader& dataLoader,
const double clibMatrix[16],
int eulerOrder,
int dirVectorInvert,
WorkpieceHoleDetectionResult& detectionResult)
{
if (laserLines.empty()) {
LOG_WARNING("No laser lines data available\n");
return ERR_CODE(DEV_DATA_INVALID);
}
// 获取当前相机的校准参数
VrCameraPlaneCalibParam cameraCalibParamValue;
const VrCameraPlaneCalibParam* cameraCalibParam = nullptr;
if (algorithmParams.planeCalibParam.GetCameraCalibParam(cameraIndex, cameraCalibParamValue)) {
cameraCalibParam = &cameraCalibParamValue;
}
// debug保存点云已由 BasePresenter::DetectTask() 统一处理,此处不再重复保存
std::string timeStamp = CVrDateUtils::GetNowTime();
int nRet = SUCCESS;
// 转换为算法需要的XYZ格式
std::vector<std::vector<SVzNL3DPosition>> xyzData;
int convertResult = dataLoader.ConvertToSVzNL3DPosition(laserLines, xyzData);
if (convertResult != SUCCESS || xyzData.empty()) {
LOG_WARNING("Failed to convert data to XYZ format or no XYZ data available\n");
return ERR_CODE(DEV_DATA_INVALID);
}
// 工件孔参数
WD_workpieceHoleParam workpiecePara = AlgoParamConverter::ToAlgoParam(algorithmParams.workpieceHoleParam);
// 线段分割参数
SSG_lineSegParam lineSegPara = AlgoParamConverter::ToAlgoParam(algorithmParams.lineSegParam);
// 滤波参数
SSG_outlierFilterParam filterParam = AlgoParamConverter::ToAlgoParam(algorithmParams.filterParam);
// 树生长参数
SSG_treeGrowParam growParam = AlgoParamConverter::ToAlgoParam(algorithmParams.growParam);
// 准备平面校准参数
SSG_planeCalibPara groundCalibPara = AlgoParamConverter::ToAlgoPlaneCalibParam(cameraCalibParam);
if(debugParam.enableDebug && debugParam.printDetailLog)
{
AlgoParamConverter::LogAlgoParams("[Algo Thread]",
workpiecePara, lineSegPara, filterParam, growParam, groundCalibPara, clibMatrix);
}
#if 0
// 数据预处理:调平和去除地面(使用当前相机的调平参数)
if(cameraCalibParam){
LOG_DEBUG("Processing data with plane calibration\n");
for(size_t i = 0; i < xyzData.size(); i++){
wd_lineDataR(xyzData[i], cameraCalibParam->planeCalib, -1);
}
}
#endif
int errCode = 0;
CVrTimeUtils oTimeUtils;
LOG_DEBUG("before wd_workpieceHolePositioning \n");
// 调用工件孔定位算法
std::vector<WD_workpieceInfo> workpiecePositioning;
wd_workpieceHolePositioning(
xyzData,
workpiecePara,
lineSegPara,
filterParam,
growParam,
groundCalibPara,
workpiecePositioning,
&errCode
);
LOG_DEBUG("after wd_workpieceHolePositioning \n");
LOG_INFO("wd_workpieceHolePositioning: found %zu workpieces, err=%d runtime=%.3fms\n", workpiecePositioning.size(), errCode, oTimeUtils.GetElapsedTimeInMilliSec());
ERR_CODE_RETURN(errCode);
// 创建手眼标定实例
std::unique_ptr<IHandEyeCalib, decltype(&DestroyHandEyeCalibInstance)> handEyeCalib(
CreateHandEyeCalibInstance(),
DestroyHandEyeCalibInstance);
if (!handEyeCalib) {
LOG_ERROR("Failed to create HandEyeCalib instance\n");
return ERR_CODE(DEV_NOT_FIND);
}
const HECCalibResult calibResult = HECCalibResult::fromHomogeneousArray(clibMatrix);
// 处理每个工件的检测结果
for (size_t i = 0; i < workpiecePositioning.size(); i++) {
const WD_workpieceInfo& workpiece = workpiecePositioning[i];
// 保存工件类型(仅保存第一个工件的类型)
if (i == 0) {
detectionResult.workpieceType = workpiece.workpieceType;
}
// 六轴转换:位置+姿态(转换到机械臂坐标系)
HECPoint3D eyeCenter(workpiece.center.x, workpiece.center.y, workpiece.center.z);
// 构建方向向量(眼坐标系)
std::vector<HECPoint3D> dirVectors_eye(3);
dirVectors_eye[0] = HECPoint3D(workpiece.x_dir.x, workpiece.x_dir.y, workpiece.x_dir.z);
dirVectors_eye[1] = HECPoint3D(workpiece.y_dir.x, workpiece.y_dir.y, workpiece.y_dir.z);
dirVectors_eye[2] = HECPoint3D(workpiece.z_dir.x, workpiece.z_dir.y, workpiece.z_dir.z);
// 根据配置决定方向向量反向
switch (dirVectorInvert) {
case DIR_INVERT_NONE:
break;
case DIR_INVERT_XY:
dirVectors_eye[0] = dirVectors_eye[0] * (-1.0);
dirVectors_eye[1] = dirVectors_eye[1] * (-1.0);
break;
case DIR_INVERT_XZ:
dirVectors_eye[0] = dirVectors_eye[0] * (-1.0);
dirVectors_eye[2] = dirVectors_eye[2] * (-1.0);
break;
case DIR_INVERT_YZ:
default:
dirVectors_eye[1] = dirVectors_eye[1] * (-1.0);
dirVectors_eye[2] = dirVectors_eye[2] * (-1.0);
break;
}
// 1. 位置转换R * P_eye + T
HECPoint3D ptRobot;
handEyeCalib->TransformPoint(calibResult.R, calibResult.T, eyeCenter, ptRobot);
// 2. 方向向量旋转变换(仅旋转,不平移)
std::vector<HECPoint3D> dirVectors_robot(3);
for (int j = 0; j < 3; j++) {
handEyeCalib->RotatePoint(calibResult.R, dirVectors_eye[j], dirVectors_robot[j]);
}
// 3. 构建旋转矩阵(列向量形式)
double R_pose[3][3];
R_pose[0][0] = dirVectors_robot[0].x;
R_pose[0][1] = dirVectors_robot[1].x;
R_pose[0][2] = dirVectors_robot[2].x;
R_pose[1][0] = dirVectors_robot[0].y;
R_pose[1][1] = dirVectors_robot[1].y;
R_pose[1][2] = dirVectors_robot[2].y;
R_pose[2][0] = dirVectors_robot[0].z;
R_pose[2][1] = dirVectors_robot[1].z;
R_pose[2][2] = dirVectors_robot[2].z;
// 4. 使用算法库的 rotationMatrixToEulerZYX 转换为欧拉角(返回角度)
SSG_EulerAngles robotRpy = rotationMatrixToEulerZYX(R_pose);
// 将机器人坐标系下的位姿添加到positions列表
HolePosition centerRobotPos;
centerRobotPos.x = ptRobot.x;
centerRobotPos.y = ptRobot.y;
centerRobotPos.z = ptRobot.z;
centerRobotPos.roll = robotRpy.roll;
centerRobotPos.pitch = robotRpy.pitch;
centerRobotPos.yaw = robotRpy.yaw;
detectionResult.positions.push_back(centerRobotPos);
if(debugParam.enableDebug && debugParam.printDetailLog){
LOG_INFO("[Algo Thread] Direction vectors (eye): X=(%.3f,%.3f,%.3f), Y=(%.3f,%.3f,%.3f), Z=(%.3f,%.3f,%.3f)\n",
workpiece.x_dir.x, workpiece.x_dir.y, workpiece.x_dir.z,
workpiece.y_dir.x, workpiece.y_dir.y, workpiece.y_dir.z,
workpiece.z_dir.x, workpiece.z_dir.y, workpiece.z_dir.z);
LOG_INFO("[Algo Thread] Direction vectors (robot): X=(%.3f,%.3f,%.3f), Y=(%.3f,%.3f,%.3f), Z=(%.3f,%.3f,%.3f)\n",
dirVectors_robot[0].x, dirVectors_robot[0].y, dirVectors_robot[0].z,
dirVectors_robot[1].x, dirVectors_robot[1].y, dirVectors_robot[1].z,
dirVectors_robot[2].x, dirVectors_robot[2].y, dirVectors_robot[2].z);
LOG_INFO("[Algo Thread] Center Eye Coords: X=%.3f, Y=%.3f, Z=%.3f\n",
workpiece.center.x, workpiece.center.y, workpiece.center.z);
LOG_INFO("[Algo Thread] Center Robot Coords: X=%.3f, Y=%.3f, Z=%.3f, R=%.4f, P=%.4f, Y=%.4f\n",
ptRobot.x, ptRobot.y, ptRobot.z,
centerRobotPos.roll, centerRobotPos.pitch, centerRobotPos.yaw);
}
}
// 使用 PointCloudCanvas 生成点云图像(灰色底图 + 红色孔位标记 + 中心点编号)
{
PointCloudCanvas canvas = PointCloudCanvas::Create(xyzData);
for (size_t i = 0; i < workpiecePositioning.size(); i++) {
const WD_workpieceInfo& workpiece = workpiecePositioning[i];
// 红色小圆点标记每个孔
for (size_t j = 0; j < workpiece.holes.size(); j++) {
canvas.drawPoint(workpiece.holes[j].x, workpiece.holes[j].y, QColor(255, 0, 0), 4);
}
// 中心点标记 + 白色编号
canvas.drawPoint(workpiece.center.x, workpiece.center.y, QColor(255, 0, 0), 4);
canvas.drawText(workpiece.center.x, workpiece.center.y,
QString("%1").arg(i + 1), Qt::white, 14);
}
detectionResult.image = canvas.image();
}
if(debugParam.enableDebug && debugParam.saveDebugImage){
// 获取当前时间戳格式为YYYYMMDDHHMMSS
std::string fileName = debugParam.debugOutputPath + "/Image_" + std::to_string(cameraIndex) + "_" + timeStamp + ".png";
LOG_INFO("[Algo Thread] Debug image saved image : %s\n", fileName.c_str());
// 保存检测结果图片
if (!detectionResult.image.isNull()) {
QString qFileName = QString::fromStdString(fileName);
detectionResult.image.save(qFileName);
} else {
LOG_WARNING("[Algo Thread] No valid image to save for debug\n");
}
}
return nRet;
}