对上交代码进行修改,主要将能量机关去掉,添加了同济的PnP位姿解算,但是同济有个四元数,获取IMU部分没有启用,可能导致精度不够。当前还存在反陀螺功能,修改为逻辑和弹道预测相结合,主要在时间关系上进行调整。

This commit is contained in:
2026-03-21 11:57:34 +08:00
commit 56985997ae
80 changed files with 60253 additions and 0 deletions

View File

@@ -0,0 +1,155 @@
//
// Created by xinyang on 19-3-27.
//
#ifndef _ARMOR_FINDER_H_
#define _ARMOR_FINDER_H_
#include <algorithm>
#include <map>
#include <systime.h>
#include <constants.h>
#include <opencv2/core.hpp>
#include <opencv2/tracking/tracking.hpp>
#include <serial.h>
#include <armor_finder/classifier/solver.h>
#include <additions.h>
#define BLOB_RED ENEMY_RED
#define BLOB_BLUE ENEMY_BLUE
#define BOX_RED ENEMY_RED
#define BOX_BLUE ENEMY_BLUE
#define IMAGE_CENTER_X (640)
#define IMAGE_CENTER_Y (512-20)
#define DISTANCE_HEIGHT_5MM (10700.0) // 单位: cm*pixel
#define DISTANCE_HEIGHT DISTANCE_HEIGHT_5MM
#define B1 1
#define B2 2
#define B3 3
#define B4 4
#define B5 5
#define B7 6
#define B8 7
#define R1 8
#define R2 9
#define R3 10
#define R4 11
#define R5 12
#define R7 13
#define R8 14
extern std::map<int, string> id2name; //装甲板id到名称的map
extern std::map<string, int> name2id; //装甲板名称到id的map
extern std::map<string, int> prior_blue;
extern std::map<string, int> prior_red;
/******************* Light-bar class definition ***********************/
class LightBlob {
public:
    cv::RotatedRect rect; // light-bar position (rotated rectangle)
    double area_ratio;    // area ratio computed by the detector
    double length;        // light-bar length (the longer side of rect)
    uint8_t blob_color;   // light-bar color (BLOB_RED / BLOB_BLUE)
    LightBlob(cv::RotatedRect &r, double ratio, uint8_t color) : rect(r), area_ratio(ratio), blob_color(color) {
        // Qualify std::max so this header does not depend on a
        // `using namespace std` pulled in by some other include.
        length = std::max(rect.size.height, rect.size.width);
    };
    LightBlob() = default;
};
typedef std::vector<LightBlob> LightBlobs;
/******************* Armor-plate class definition **********************/
class ArmorBox{
public:
    typedef enum{
        FRONT, SIDE, UNKNOWN
    } BoxOrientation; // facing classification (deprecated, see getOrientation)
    cv::Rect2d rect;                 // bounding box in the image
    LightBlobs light_blobs;          // the light bars forming this armor
    uint8_t box_color;               // armor color (BOX_RED / BOX_BLUE)
    int id;                          // armor id (see B1..R8 macros / id2name)
    std::vector<cv::Point2f> points; // 4 corner points for PnP
    // NOTE(review): `pos` is cv::Rect while `rect` is cv::Rect2d and the default
    // argument is a Rect2d — this relies on cv::Rect_ conversions; confirm intended.
    explicit ArmorBox(const cv::Rect &pos=cv::Rect2d(), const LightBlobs &blobs=LightBlobs(), uint8_t color=0, int i=0, const std::vector<cv::Point2f> &pts = {});
    cv::Point2f getCenter() const;             // armor center in pixels
    double getBlobsDistance() const;           // distance between the two blob centers
    double lengthDistanceRatio() const;        // blob length / blob-distance ratio
    double getBoxDistance() const;             // distance from armor to camera
    BoxOrientation getOrientation() const;     // armor facing (large error, deprecated)
    bool operator<(const ArmorBox &box) const; // armor priority comparison
};
typedef std::vector<ArmorBox> ArmorBoxes;
/********************* Auto-aim class definition **********************/
class ArmorFinder{
public:
    ArmorFinder(uint8_t &color, Serial &u, const string &paras_folder, const uint8_t &anti_top);
    ~ArmorFinder() = default;
private:
    typedef cv::TrackerKCF TrackerToUse; // tracker implementation in use
    typedef enum{
        SEARCHING_STATE, TRACKING_STATE, STANDBY_STATE
    } State; // auto-aim state machine states
    systime frame_time;          // timestamp of the current frame
    const uint8_t &enemy_color;  // enemy color; reference to an external variable, tracks it automatically
    const uint8_t &is_anti_top;  // anti-spinning-top mode flag; external reference, tracks it automatically
    State state;                 // current state-machine state
    ArmorBox target_box, last_box; // current / previous target armor
    int anti_switch_cnt;         // counter preventing erratic target switching
    cv::Ptr<cv::Tracker> tracker; // tracker instance
    Classifier classifier;       // CNN classifier instance for digit recognition
    int contour_area;            // bright-pixel count in the armor ROI; lost-target fallback when classifier disabled (deprecated)
    int tracking_cnt;            // tracked-frame counter, forces periodic re-search
    Serial &serial;              // serial port object
    auto_aim::Solver solver;     // PnP solver
    systime last_front_time;     // last time a spinning armor faced the camera
    int anti_top_cnt;            // frames spent in anti-top mode
    RoundQueue<double, 4> top_periodms; // ring buffer of measured spin periods (ms)
    vector<systime> time_seq;    // time samples within one spin period
    vector<float> angle_seq;     // angle samples within one spin period
    float yaw_rotation, pitch_rotation; // target gimbal yaw/pitch offsets
    float last_yaw, last_pitch;  // previous errors (PID derivative term)
    float sum_yaw, sum_pitch;    // accumulated yaw/pitch errors (PID integral term)
    bool findLightBlobs(const cv::Mat &src, LightBlobs &light_blobs);
    bool findArmorBox(const cv::Mat &src, ArmorBox &box);
    bool matchArmorBoxes(const cv::Mat &src, const LightBlobs &light_blobs, ArmorBoxes &armor_boxes);
    bool stateSearchingTarget(cv::Mat &src); // SEARCHING-state main handler
    bool stateTrackingTarget(cv::Mat &src);  // TRACKING-state main handler
    bool stateStandBy();                     // STANDBY-state main handler (deprecated)
    void antiTop(double dist_m, double pitch_imu_deg); // anti spinning-top logic
    cv::Point3f getTarget3D(const ArmorBox &box); // target 3D position relative to the camera
    bool sendBoxPosition(uint16_t shoot_delay);   // send armor position over serial
    bool shouldFire() const { return can_fire; }  // firing recommendation
public:
    void run(cv::Mat &src); // auto-aim main entry, called once per frame
    struct HistoryItem {
        cv::Point3f pos; // target position (cm)
        double time;     // timestamp (s)
    };
    std::vector<HistoryItem> history; // recent positions, used for velocity prediction
    cv::Point3f target_xyz;           // target 3D position from PnP (cm)
    bool can_fire;                    // whether firing conditions are met
};
#endif /* _ARMOR_FINDER_H_ */

View File

@@ -0,0 +1,27 @@
#ifndef AUTO_AIM__ARMOR_HPP
#define AUTO_AIM__ARMOR_HPP
#include <opencv2/core.hpp>
#include <Eigen/Dense>
#include <vector>
namespace auto_aim
{
enum class ArmorType { small, big };
enum class ArmorName { NO, B1, B2, B3, B4, B5, B7, B8, R1, R2, R3, R4, R5, R7, R8, outpost };
struct Armor {
    ArmorType type;                  // small vs big armor plate
    ArmorName name;                  // recognized label/number
    std::vector<cv::Point2f> points; // detected corner points (pixels), PnP input
    Eigen::Vector3d xyz_in_gimbal;   // position in the gimbal frame (filled by Solver::solve)
    Eigen::Vector3d xyz_in_world;    // position in the world frame
    Eigen::Vector3d ypr_in_gimbal;   // yaw/pitch/roll in the gimbal frame
    Eigen::Vector3d ypr_in_world;    // yaw/pitch/roll in the world frame
    Eigen::Vector3d ypd_in_world;    // presumably yaw/pitch/distance in the world frame — confirm against Solver
    double yaw_raw;                  // yaw before optimization (see Solver::optimize_yaw)
};
} // namespace auto_aim
#endif // AUTO_AIM__ARMOR_HPP

View File

@@ -0,0 +1,58 @@
//
// Created by xinyang on 19-4-19.
//
// 为了一时方便使用循环和Eigen自行编写的CNN前向传播类。
// 没有显著的性能损失。
// 但类定义了网络结构,同时实现的操作较少,可扩展性较差
#ifndef _CLASSIFIER_H_
#define _CLASSIFIER_H_
#include <Eigen/Dense>
#include <opencv2/core/eigen.hpp>
#include <vector>
#include <string>
#include <opencv2/core.hpp>
using namespace std;
using namespace Eigen;
class Classifier {
private:
    bool state; // whether the classifier was initialized correctly
    // all network parameters
    vector<vector<MatrixXd>> conv1_w, conv2_w, conv3_w;
    vector<double> conv1_b, conv2_b, conv3_b;
    MatrixXd fc1_w, fc2_w;
    VectorXd fc1_b, fc2_b;
    // parameter-file loaders (set `state` to false on failure)
    vector<vector<MatrixXd>> load_conv_w(const string &file);
    vector<double> load_conv_b(const string &file);
    MatrixXd load_fc_w(const string &file);
    VectorXd load_fc_b(const string &file);
    // the operations currently supported
    MatrixXd softmax(const MatrixXd &input);
    MatrixXd relu(const MatrixXd &input);
    MatrixXd leaky_relu(const MatrixXd &input, float alpha);
    vector<vector<MatrixXd>> apply_bias(const vector<vector<MatrixXd>> &input, const vector<double> &bias);
    vector<vector<MatrixXd>> relu(const vector<vector<MatrixXd>> &input);
    vector<vector<MatrixXd>> leaky_relu(const vector<vector<MatrixXd>> &input, float alpha);
    vector<vector<MatrixXd>> max_pool(const vector<vector<MatrixXd>> &input, int size);
    vector<vector<MatrixXd>> mean_pool(const vector<vector<MatrixXd>> &input, int size);
    vector<vector<MatrixXd>> pand(const vector<vector<MatrixXd>> &input, int val);
    MatrixXd conv(const MatrixXd &filter, const MatrixXd &input);
    vector<vector<MatrixXd>> conv2(const vector<vector<MatrixXd>> &filter, const vector<vector<MatrixXd>> &input);
    MatrixXd flatten(const vector<vector<MatrixXd>> &input);
public:
    explicit Classifier(const string &folder); // load all parameters from `folder`
    ~Classifier() = default;
    MatrixXd calculate(const vector<vector<MatrixXd>> &input); // forward pass
    explicit operator bool() const;            // true when parameters loaded successfully
    int operator()(const cv::Mat &image);      // classify one BGR image, 0 = rejected
};
#endif /* _CLASSIFIER_H */

View File

@@ -0,0 +1,48 @@
#ifndef AUTO_AIM__SOLVER_HPP
#define AUTO_AIM__SOLVER_HPP
#include <Eigen/Dense> // 必须在opencv2/core/eigen.hpp上面
#include <Eigen/Geometry>
#include <opencv2/core/eigen.hpp>
#include "armor.hpp"
namespace auto_aim
{
class Solver
{
public:
  // Loads camera intrinsics/distortion (and, when enabled, extrinsics) from YAML.
  explicit Solver(const std::string & config_path);
  Eigen::Matrix3d R_gimbal2world() const;
  // Update the gimbal-to-world rotation from an IMU quaternion.
  void set_R_gimbal2world(const Eigen::Quaterniond & q);
  // Solve the armor pose (PnP) and fill the Armor output fields.
  void solve(Armor & armor) const;
  std::vector<cv::Point2f> reproject_armor(
    const Eigen::Vector3d & xyz_in_world, double yaw, ArmorType type, ArmorName name) const;
  // NOTE(review): "oupost"/"picth" are typos for outpost/pitch; renaming requires
  // touching the out-of-view definition and all callers, so kept as-is here.
  double oupost_reprojection_error(Armor armor, const double & picth);
  std::vector<cv::Point2f> world2pixel(const std::vector<cv::Point3f> & worldPoints);
private:
  cv::Mat camera_matrix_;            // intrinsics (from YAML)
  cv::Mat distort_coeffs_;           // distortion coefficients (from YAML)
  Eigen::Matrix3d R_gimbal2imubody_; // extrinsics — loading currently disabled in the ctor
  Eigen::Matrix3d R_camera2gimbal_;
  Eigen::Vector3d t_camera2gimbal_;
  Eigen::Matrix3d R_gimbal2world_;   // identity until set_R_gimbal2world is called
  void optimize_yaw(Armor & armor) const;
  double armor_reprojection_error(const Armor & armor, double yaw, const double & inclined) const;
  double SJTU_cost(
    const std::vector<cv::Point2f> & cv_refs, const std::vector<cv::Point2f> & cv_pts,
    const double & inclined) const;
};
} // namespace auto_aim
#endif // AUTO_AIM__SOLVER_HPP

View File

@@ -0,0 +1,61 @@
#ifndef AUTO_TRIGGER_H
#define AUTO_TRIGGER_H
#include <armor_finder/armor_finder.h>
#include <show_images/ballistic_predicition.h>
#include <config/setconfig.h>
#include <vector>
#include <numeric>
/**
 * @brief Auto-trigger logic: decides when to fire.
 */
class AutoTrigger {
public:
    /**
     * @brief Fire-decision function.
     * @param armor_finder current auto-aim object (provides the target history)
     * @param v0 muzzle velocity (m/s)
     * @param gimbal_yaw current gimbal yaw aim offset (deg) — currently unused
     * @param gimbal_pitch current gimbal pitch aim offset (deg)
     * @return true when the predicted target position coincides with the
     *         predicted impact point, i.e. firing is recommended
     */
    static bool should_fire(const ArmorFinder &armor_finder, double v0, double gimbal_yaw, double gimbal_pitch) {
        if (armor_finder.history.size() < 3) return false; // not enough track points to predict
        // 1. Latest 3D state of the target (camera frame, cm).
        const auto &latest = armor_finder.history.back();
        double dist = latest.pos.z / 100.0; // cm -> m
        // 2. Velocity estimate from a two-point difference across the buffer.
        const auto &first = armor_finder.history.front();
        double dt = latest.time - first.time; // s
        if (dt <= 0) return false;
        cv::Point3f velocity = (latest.pos - first.pos) * (1.0 / dt); // cm/s
        // 3. Projectile flight time (s) plus fixed system latency.
        double t_flight = BallisticSolver::get_flight_time(dist, -gimbal_pitch, v0, BALLISTIC_K);
        double t_total = t_flight + SYSTEM_DELAY / 1000.0;
        // 4. Predicted future target position (cm).
        cv::Point3f pos_pred = latest.pos + velocity * t_total;
        // BUG FIX: a non-positive predicted depth makes the pinhole projection
        // below meaningless (division by ~0 / target behind the camera) —
        // never recommend firing in that case.
        if (pos_pred.z <= 0) return false;
        // 5. Project the predicted position back to pixel coordinates.
        double x_pred_pixel = pos_pred.x * FOCUS_PIXAL / pos_pred.z + IMAGE_CENTER_X;
        double y_pred_pixel = pos_pred.y * FOCUS_PIXAL / pos_pred.z + IMAGE_CENTER_Y;
        // 6. Pixel error between the predicted point and the image center:
        //    auto-aim keeps the compensated aim point at the center, so this is
        //    the pointing error of the gimbal.
        double dx = x_pred_pixel - IMAGE_CENTER_X;
        double dy = y_pred_pixel - IMAGE_CENTER_Y;
        double dist_to_center = sqrt(dx * dx + dy * dy);
        // 7. Distance-dependent threshold: the farther the target, the smaller
        //    the allowed pixel error (~40 px at 5 m).
        double threshold = 200.0 / (dist + 0.001);
        return dist_to_center < threshold;
    }
};
#endif // AUTO_TRIGGER_H

View File

@@ -0,0 +1,97 @@
/*
*** 此文件用于hero弹道着落点位通过3D建模以及理论发弹速度以及后期轨道修正手段 ***
*** 实现对敌方装甲板的精准打击 ***
*/
#ifndef BALLISTIC_PREDICITION_H
#define BALLISTIC_PREDICITION_H
#include <Eigen/Dense>
#include <opencv2/core.hpp>
/**
 * Impact-point computation result.
 */
struct ImpactPoint {
    Eigen::Vector3d xyz_in_camera; // impact point in the camera frame (m)
    double flight_time;            // projectile flight time (s)
};
/**
 * @brief Ballistic solver: from the current IMU pitch and the tracked armor
 *        distance, compute compensation angles, flight times and the impact
 *        point in the camera frame.
 *
 * Frame conventions (consistent with solver.cpp):
 *   world:  x = forward, y = left,  z = up
 *   camera: z = forward, x = right, y = down (OpenCV standard)
 */
class BallisticSolver {
public:
    /**
     * @brief Iteratively solve the compensated pitch angle for a given
     *        horizontal distance and height difference.
     * @param d  horizontal distance (m)
     * @param y  height of target above the muzzle (m, up positive)
     * @param v0 muzzle velocity (m/s)
     * @param k  air-drag coefficient
     * @return compensated pitch angle (deg)
     */
    static double get_pitch(double d, double y, double v0, double k = 0.1);
    /**
     * @brief Compute the projectile flight time.
     * @param d     horizontal target distance (m, from the tracked armor)
     * @param pitch current IMU pitch angle (deg, up positive)
     * @param v0    muzzle velocity (m/s)
     * @param k     air-drag coefficient
     * @return flight time (s)
     */
    static double get_flight_time(double d, double pitch, double v0, double k = 0.1);
    /**
     * @brief From the current IMU pitch, compute the projectile height at
     *        horizontal distance d.
     * @param d     horizontal target distance (m)
     * @param pitch current IMU pitch angle (deg, up positive)
     * @param v0    muzzle velocity (m/s)
     * @param k     air-drag coefficient
     * @return impact height relative to the muzzle (m, above muzzle positive)
     */
    static double get_impact_y(double d, double pitch, double v0, double k = 0.1);
    /**
     * [TEST] Validates the ballistic model: back-solves the impact point into
     * the camera frame so it can be projected with project_to_pixel and drawn
     * as a red dot. Once verified, just comment out the call site — no other
     * structure is affected.
     *
     * Transform chain:
     *     xyz_camera = R_c2g^T * (R_g2w^T * xyz_world - t_c2g)
     *
     * @param xyz_in_world target position in the world frame (m, from the PnP Solver)
     * @param pitch_imu current IMU pitch angle (deg, up positive)
     * @param v0 muzzle velocity (m/s)
     * @param R_camera2gimbal camera-to-gimbal rotation
     * @param R_gimbal2world gimbal-to-world rotation
     * @param t_camera2gimbal camera origin in the gimbal frame (m)
     * @param k air-drag coefficient
     * @return ImpactPoint { impact point in camera frame (m), flight time (s) }
     */
    static ImpactPoint get_impact_in_camera(
        const Eigen::Vector3d& xyz_in_world,
        double pitch_imu,
        double v0,
        const Eigen::Matrix3d& R_camera2gimbal,
        const Eigen::Matrix3d& R_gimbal2world,
        const Eigen::Vector3d& t_camera2gimbal,
        double k = 0.1);
    /**
     * [TEST] Simplified projection of a camera-frame 3D point to pixels
     *        (pinhole model, no distortion).
     * @param xyz_in_camera impact point in the camera frame (m)
     * @param camera_matrix OpenCV intrinsic matrix (3x3 CV_64F)
     * @return pixel coordinates
     */
    static cv::Point2f project_to_pixel(
        const Eigen::Vector3d& xyz_in_camera,
        const cv::Mat& camera_matrix);
};
#endif // BALLISTIC_PREDICITION_H

View File

@@ -0,0 +1,23 @@
//
// Created by xinyang on 19-3-27.
//
#ifndef _SHOW_IMAGES_H_
#define _SHOW_IMAGES_H_
#include <opencv2/core.hpp>
#include <armor_finder/armor_finder.h>
//
// Display helpers: draw detection results in named HighGUI windows.
void showArmorBoxes(std::string windows_name, const cv::Mat &src, const ArmorBoxes &armor_boxes); // all candidate boxes
void showArmorBox(std::string windows_name, const cv::Mat &src, const ArmorBox &armor_box);       // the selected target box
void showLightBlobs(std::string windows_name, const cv::Mat &src, const LightBlobs &light_blobs); // detected light bars
void showArmorBoxesClass(std::string window_names, const cv::Mat &src, const ArmorBoxes &boxes);  // boxes with class labels
void showTrackSearchingPos(std::string window_names, const cv::Mat &src, const cv::Rect2d pos);   // tracker ROI
// [TEST] Mark the predicted impact point (small red dot) on the image to
// validate the ballistic model; once verified, just comment out the call site.
// impact_pixel: pixel position from BallisticSolver::project_to_pixel().
void showImpactPoint(const std::string& window_name, const cv::Mat& src, cv::Point2f impact_pixel);
#endif /* _SHOW_IMAGES_H_ */

View File

@@ -0,0 +1,88 @@
//
// Created by xinyang on 19-7-15.
//
#include <armor_finder/armor_finder.h>
#include <additions.h>
#include <log.h>
#include <show_images/ballistic_predicition.h>
#include <config/setconfig.h>
// Arithmetic mean of the spin-period ring buffer.
// NOTE(review): the sum iterates vec.size() elements but always divides by
// `length`; if RoundQueue::size() can be < length before the queue fills up,
// the mean is biased toward zero — confirm RoundQueue semantics.
template<int length>
static double mean(RoundQueue<double, length> &vec) {
    double sum = 0;
    for (int i = 0; i < vec.size(); i++) {
        sum += vec[i];
    }
    return sum / length;
}
// Least-squares fit of t = k*angle + b over the sampled (angle, time) pairs;
// returns the intercept b, i.e. the estimated time at which the armor angle
// crossed zero (armor directly facing the camera).
// BUG FIX: the two vectors were passed by value, copying both sample buffers
// on every call — pass by const reference instead (call sites unchanged).
static systime getFrontTime(const vector<systime> &time_seq, const vector<float> &angle_seq) {
    double A = 0, B = 0, C = 0, D = 0; // A=Σa², B=Σa, C=Σa·t, D=Σt
    int len = time_seq.size();
    for (int i = 0; i < len; i++) {
        A += angle_seq[i] * angle_seq[i];
        B += angle_seq[i];
        C += angle_seq[i] * time_seq[i];
        D += time_seq[i];
        cout << "(" << angle_seq[i] << ", " << time_seq[i] << ") ";
    }
    // Standard least-squares intercept: b = (Σa²·Σt − Σa·Σat) / (n·Σa² − (Σa)²)
    double b = (A * D - B * C) / (len * A - B * B);
    cout << b << endl;
    return b;
}
// Anti spinning-top logic: estimate the spin period from armor-switch events
// and schedule the shot so the bullet arrives when an armor faces the camera.
// @param dist_m        target horizontal distance (m)
// @param pitch_imu_deg current IMU pitch (deg, up positive)
void ArmorFinder::antiTop(double dist_m, double pitch_imu_deg) {
    if (target_box.rect == cv::Rect2d()) return;
    // Detect whether the target armor plate has switched.
    // Record angle/time samples of the previous armor before the switch,
    // fit a line to find the time its angle crossed zero,
    // derive the spin period from two consecutive zero-angle times,
    // then predict when the next armor will face the camera.
    if (getPointLength(last_box.getCenter() - target_box.getCenter()) > last_box.rect.height * 1.5) {
        auto front_time = getFrontTime(time_seq, angle_seq);
        auto once_periodms = getTimeIntervalms(front_time, last_front_time);
        // if (abs(once_periodms - top_periodms[-1]) > 50) {
        //     sendBoxPosition(0);
        //     return;
        // }
        LOGM(STR_CTR(WORD_GREEN, "Top period: %.1lf"), once_periodms);
        top_periodms.push(once_periodms);
        auto periodms = mean(top_periodms);
        systime curr_time;
        getsystime(curr_time);
        // flight-time compensation (ms)
        double fly_time_ms = BallisticSolver::get_flight_time(
                dist_m, pitch_imu_deg, MUZZLE_VELOCITY, BALLISTIC_K) * 1000.0;
        constexpr double sys_delay_ms = SYSTEM_DELAY; // setconfig.h, default 50 ms
        // Timing model: impact time = command time + shoot_delay + sys_delay + fly_time
        // Required:     impact time = front_time + 2 * period
        int32_t delay_raw = static_cast<int32_t>(
                front_time + periodms * 2 - curr_time - sys_delay_ms - fly_time_ms);
        // Window already missed (delay_raw < 0): postpone by one period.
        uint16_t shoot_delay = (delay_raw > 0)
                ? static_cast<uint16_t>(delay_raw)
                : static_cast<uint16_t>(delay_raw + static_cast<int32_t>(periodms));
        if (anti_top_cnt < 4) {
            sendBoxPosition(0);  // not enough switch samples yet: aim only
        } else if (abs(once_periodms - top_periodms[-1]) > 50) {
            sendBoxPosition(0);  // period estimate unstable: aim only
        } else {
            sendBoxPosition(shoot_delay);
        }
        time_seq.clear();
        angle_seq.clear();
        last_front_time = front_time;
    } else {
        // Same armor still tracked: keep sampling (time, yaw-angle) pairs.
        time_seq.emplace_back(frame_time);
        double dx = target_box.rect.x + target_box.rect.width / 2 - IMAGE_CENTER_X;
        double yaw = atan(dx / FOCUS_PIXAL) * 180 / PI;
        angle_seq.emplace_back(yaw);
        sendBoxPosition(0);
    }
    anti_top_cnt++;
}

View File

@@ -0,0 +1,99 @@
//
// Created by xinyang on 19-7-13.
//
#include <armor_finder/armor_finder.h>
#include <log.h>
// Construct an armor box from its bounding rect, light bars, color, id and
// the four PnP corner points.
ArmorBox::ArmorBox(const cv::Rect &pos, const LightBlobs &blobs, uint8_t color, int i, const std::vector<cv::Point2f> &pts) :
        rect(pos), light_blobs(blobs), box_color(color), id(i), points(pts) {};
// Center of the armor's bounding box, in pixel coordinates.
cv::Point2f ArmorBox::getCenter() const {
    const auto cx = rect.x + rect.width / 2;
    const auto cy = rect.y + rect.height / 2;
    return cv::Point2f(cx, cy);
}
// Euclidean distance between the two light-bar centers; 0 unless exactly
// two bars are present.
double ArmorBox::getBlobsDistance() const {
    if (light_blobs.size() != 2) {
        return 0;
    }
    const auto &a = light_blobs[0].rect.center;
    const auto &b = light_blobs[1].rect.center;
    const double dx = a.x - b.x;
    const double dy = a.y - b.y;
    return sqrt(dx * dx + dy * dy);
}
// Ratio of the longer light-bar length to the blob distance; a sentinel of
// 100 is returned when the box does not hold exactly two bars.
double ArmorBox::lengthDistanceRatio() const {
    if (light_blobs.size() != 2) {
        return 100;
    }
    const double longer = max(light_blobs[0].length, light_blobs[1].length);
    return longer / getBlobsDistance();
}
// Distance from armor to camera (cm), estimated from the apparent light-bar
// height via the DISTANCE_HEIGHT calibration constant; falls back to the
// bounding-box height when two bars are not available.
double ArmorBox::getBoxDistance() const {
    if (light_blobs.size() != 2) {
        return DISTANCE_HEIGHT / rect.height;
    }
    const double blob_len = max(light_blobs[0].length, light_blobs[1].length);
    return DISTANCE_HEIGHT / 2 / blob_len;
}
// Classify whether the armor faces the camera or is seen from the side
// (deprecated — error too large). Hero/sentry-sized plates (1/7/8) use a
// tighter ratio threshold than infantry plates (2-5).
ArmorBox::BoxOrientation ArmorBox::getOrientation() const {
    if (light_blobs.size() != 2) {
        return UNKNOWN;
    }
    switch (id) {
        case R1:
        case R7:
        case R8:
        case B1:
        case B7:
        case B8:
            if (lengthDistanceRatio() < 0.26) {
                return FRONT;
            } else {
                return SIDE;
            }
        case R2:
        case R3:
        case R4:
        case R5:
        case B2:
        case B3:
        case B4:
        case B5:
            if (lengthDistanceRatio() < 0.51) {
                return FRONT;
            } else {
                return SIDE;
            }
        default:
            return UNKNOWN;
    }
}
// Armor priority comparison: lower value in the prior_* tables wins; ties on
// id are broken by distance of the box's top-left corner to the image center.
// NOTE(review): prior_blue/prior_red and id2name are indexed with
// std::map::operator[], which default-inserts an entry (priority 0 = highest)
// for any name missing from the table (e.g. "OO" for id -1) and mutates the
// global tables from inside a const comparator — confirm whether ids outside
// the tables can reach this point.
bool ArmorBox::operator<(const ArmorBox &box) const {
    if (id != box.id) {
        if (box_color == BOX_BLUE) {
            return prior_blue[id2name[id]] < prior_blue[id2name[box.id]];
        } else {
            return prior_red[id2name[id]] < prior_red[id2name[box.id]];
        }
    } else {
        auto d1 = (rect.x - IMAGE_CENTER_X) * (rect.x - IMAGE_CENTER_X)
                  + (rect.y - IMAGE_CENTER_Y) * (rect.y - IMAGE_CENTER_Y);
        auto d2 = (box.rect.x - IMAGE_CENTER_X) * (box.rect.x - IMAGE_CENTER_X)
                  + (box.rect.y - IMAGE_CENTER_Y) * (box.rect.y - IMAGE_CENTER_Y);
        return d1 < d2;
    }
}

View File

@@ -0,0 +1,148 @@
//
// Created by xinyang on 19-3-27.
//
/*===========================================================================*/
/* 使用本代码的兵种 */
/*===========================================================================*/
/* _______________ _______________ _______________ _______________ */
/* | _____ | | _ _ | | ____ | | _____ | */
/* || |___ / || || | || | || || | ___| || || |___ | || */
/* || |_ \ || || | || |_ || || |___ \ || || / / || */
/* || ___) | || || |__ _| || || ___) | || || / / || */
/* || |____/ || || |_| || || |____/ || || /_/ || */
/* |_______________| |_______________| |_______________| |_______________| */
/* */
/*===========================================================================*/
#define LOG_LEVEL LOG_NONE
#include <log.h>
#include <options.h>
#include <show_images/show_images.h>
#include <opencv2/highgui.hpp>
#include <armor_finder/armor_finder.h>
std::map<int, string> id2name = { // armor id -> display name
        {-1, "OO"},{ 0, "NO"},
        { 1, "B1"},{ 2, "B2"},{ 3, "B3"},{ 4, "B4"},{ 5, "B5"},{ 6, "B7"},{ 7, "B8"},
        { 8, "R1"},{ 9, "R2"},{10, "R3"},{11, "R4"},{12, "R5"},{13, "R7"},{14, "R8"},
};
std::map<string, int> name2id = { // armor name -> id (inverse of id2name)
        {"OO", -1},{"NO", 0},
        {"B1", 1},{"B2", 2},{"B3", 3},{"B4", 4},{"B5", 5},{"B7", 6},{"B8", 7},
        {"R1", 8},{"R2", 9},{"R3", 10},{"R4", 11},{"R5", 12},{"R7", 13},{"R8", 14},
};
// Target priority tables used by ArmorBox::operator< (lower value = higher
// priority); prior_blue is consulted for blue boxes, prior_red for red ones.
// NOTE(review): neither table contains "OO" (id -1) — see operator<.
std::map<string, int> prior_blue = {
        {"B8", 0}, {"B1", 1}, {"B3", 2}, {"B4", 2}, {"B5", 2}, {"B7", 3}, {"B2", 4},
        {"R8", 5}, {"R1", 6}, {"R3", 7}, {"R4", 7}, {"R5", 7}, {"R7", 8}, {"R2", 9},
        {"NO", 10},
};
std::map<string, int> prior_red = {
        {"R8", 0}, {"R1", 1}, {"R3", 2}, {"R4", 2}, {"R5", 2}, {"R7", 3}, {"R2", 4},
        {"B8", 5}, {"B1", 6}, {"B3", 7}, {"B4", 7}, {"B5", 7}, {"B7", 8}, {"B2", 9},
        {"NO", 10},
};
// Construct the auto-aim object.
// @param color        reference to the enemy-color flag (tracked externally)
// @param u            serial port used to send aiming commands
// @param paras_folder folder holding the CNN classifier parameters
// @param anti_top     reference to the anti-spinning-top mode flag
// BUG FIX: the mem-initializer list is reordered to match the member
// declaration order in armor_finder.h — members are always initialized in
// declaration order, so the old ordering was misleading and triggered
// -Wreorder.
ArmorFinder::ArmorFinder(uint8_t &color, Serial &u, const string &paras_folder, const uint8_t &anti_top) :
        enemy_color(color),
        is_anti_top(anti_top),
        state(STANDBY_STATE),
        anti_switch_cnt(0),
        classifier(paras_folder),
        contour_area(0),
        tracking_cnt(0),
        serial(u),
        solver(PROJECT_DIR"/others/solver_config.yml") {
}
// Auto-aim main entry, called once per frame: runs the search/track state
// machine, the anti-spinning-top branch, and updates the position history
// used by the auto-trigger prediction.
void ArmorFinder::run(cv::Mat &src) {
    getsystime(frame_time); // timestamp of the current frame (not highly accurate)
    // stateSearchingTarget(src); // for debug
    // goto end;
    switch (state) {
        case SEARCHING_STATE:
            if (stateSearchingTarget(src)) {
                if ((target_box.rect & cv::Rect2d(0, 0, 640, 480)) == target_box.rect) { // armor region fully inside the image?
                    if (!classifier) { /* classifier unavailable: */
                        cv::Mat roi = src(target_box.rect).clone(), roi_gray; /* use bright-pixel count in the ROI to detect target loss */
                        cv::cvtColor(roi, roi_gray, cv::COLOR_RGB2GRAY);
                        cv::threshold(roi_gray, roi_gray, 180, 255, cv::THRESH_BINARY);
                        contour_area = cv::countNonZero(roi_gray);
                    }
                    tracker = TrackerToUse::create(); // armor found: create the tracker
                    tracker->init(src, target_box.rect);
                    state = TRACKING_STATE;
                    tracking_cnt = 0;
                    LOGM(STR_CTR(WORD_LIGHT_CYAN, "into track"));
                }
            }
            break;
        case TRACKING_STATE:
            if (!stateTrackingTarget(src) || ++tracking_cnt > 100) { // track at most 100 frames before re-searching
                state = SEARCHING_STATE;
                LOGM(STR_CTR(WORD_LIGHT_YELLOW, "into search!"));
            }
            break;
        case STANDBY_STATE:
        default:
            stateStandBy(); // currently meaningless
    }
    end:
    if(is_anti_top) { // anti spinning-top mode?
        // dist_m: horizontal target distance (m), from target_xyz (cm)
        double dist_m = std::sqrt(
                target_xyz.x * target_xyz.x + target_xyz.z * target_xyz.z) / 100.0;
        // TODO: replace pitch_imu_deg with the actual IMU pitch read over serial (deg, up positive)
        double pitch_imu_deg = 0.0;
        antiTop(dist_m, pitch_imu_deg);
    }else if(target_box.rect != cv::Rect2d()) {
        anti_top_cnt = 0;
        time_seq.clear();
        angle_seq.clear();
        sendBoxPosition(0);
    }
    if(target_box.rect != cv::Rect2d()){
        last_box = target_box;
    }
    if (show_armor_box) { // optionally display the current target armor
        showArmorBox("box", src, target_box);
        cv::waitKey(1);
    }
    // Auto-trigger position prediction: update the history buffer.
    if (target_box.rect != cv::Rect2d()) {
        auto_aim::Armor armor;
        armor.type = (target_box.id == B1 || target_box.id == R1 || target_box.id == B2 || target_box.id == R2 || target_box.id == B7 || target_box.id == R7 || target_box.id == B8 || target_box.id == R8) ? auto_aim::ArmorType::big : auto_aim::ArmorType::small;
        armor.name = static_cast<auto_aim::ArmorName>(target_box.id); // This might need mapping
        armor.points = target_box.points;
        // IMU quaternion input is currently disabled, so the solver runs with
        // an identity gimbal-to-world rotation (reduced accuracy; see commit note).
        // solver.set_R_gimbal2world(Eigen::Quaterniond(mcu_data.q[0], mcu_data.q[1], mcu_data.q[2], mcu_data.q[3]));
        solver.solve(armor);
        target_xyz = cv::Point3f(armor.xyz_in_gimbal.x() * 100.0, armor.xyz_in_gimbal.y() * 100.0, armor.xyz_in_gimbal.z() * 100.0); // m -> cm
        cv::Point3f current_pos = target_xyz;
        double current_time = static_cast<double>(frame_time.tv_sec) + static_cast<double>(frame_time.tv_usec) / 1e6;
        history.push_back({current_pos, current_time});
        if (history.size() > 10) { // keep only the most recent 10 frames
            history.erase(history.begin());
        }
    } else {
        history.clear(); // target lost: drop the history
    }
}
// Back-project the armor center to a camera-frame 3D point (cm) using the
// pinhole model: depth from blob height, lateral offsets from pixel offsets.
cv::Point3f ArmorFinder::getTarget3D(const ArmorBox &box) {
    const double depth = box.getBoxDistance(); // cm
    const auto center = box.getCenter();
    const double x = depth * (center.x - IMAGE_CENTER_X) / FOCUS_PIXAL;
    const double y = depth * (center.y - IMAGE_CENTER_Y) / FOCUS_PIXAL;
    return cv::Point3f(x, y, depth);
}

View File

@@ -0,0 +1,332 @@
//
// Created by xinyang on 19-4-19.
//
// 对本文件的大致描述请看classifier.h
//#define LOG_LEVEL LOG_NONE
#include <armor_finder/classifier/classifier.h>
#include <log.h>
#include <cstdio>
#include <iostream>
// Load convolution weights from a text file:
// header "channel_in channel_out rows cols", then row-major coefficients.
// On any failure `state` is cleared so operator bool reports an unusable net.
vector<vector<MatrixXd>> Classifier::load_conv_w(const string &file){
    vector<vector<MatrixXd>> result;
    FILE *fp = fopen(file.data(), "r");
    if(fp == nullptr){
        LOGE("%s open fail!", file.data());
        state = false;
        return result;
    }
    int channel_in, channel_out, row, col;
    // BUG FIX: check the header parse instead of using indeterminate values.
    if(fscanf(fp, "%d %d %d %d", &channel_in, &channel_out, &row, &col) != 4){
        LOGE("%s bad header!", file.data());
        state = false;
        fclose(fp);
        return result;
    }
    for(int o=0; o<channel_in; o++){
        vector<MatrixXd> sub;
        for(int i=0; i<channel_out; i++){
            MatrixXd f(row, col);
            for(int r=0; r<row; r++){
                for(int c=0; c<col; c++){
                    fscanf(fp, "%lf", &f(r, c));
                }
            }
            sub.emplace_back(f);
        }
        result.emplace_back(sub);
    }
    fclose(fp); // BUG FIX: the stream was previously leaked
    return result;
}
// Load a convolution bias vector: header "len", then len doubles.
// On any failure `state` is cleared.
vector<double> Classifier::load_conv_b(const string &file){
    vector<double> result;
    FILE *fp = fopen(file.data(), "r");
    if(fp == nullptr){
        LOGE("%s open fail!", file.data());
        state = false;
        return result;
    }
    int len;
    // BUG FIX: check the header parse instead of using an indeterminate length.
    if(fscanf(fp, "%d", &len) != 1){
        LOGE("%s bad header!", file.data());
        state = false;
        fclose(fp);
        return result;
    }
    for(int i=0; i<len; i++) {
        double v;
        fscanf(fp, "%lf", &v);
        result.emplace_back(v);
    }
    fclose(fp); // BUG FIX: the stream was previously leaked
    return result;
}
// Load fully-connected weights. The file stores "cols rows" and the values
// column-by-column (transposed layout). On failure `state` is cleared and a
// 1x1 zero matrix is returned.
MatrixXd Classifier::load_fc_w(const string &file){
    FILE *fp = fopen(file.data(), "r");
    if(fp == nullptr){
        LOGE("%s open fail!", file.data());
        state = false;
        return MatrixXd::Zero(1,1);
    }
    int row, col;
    // BUG FIX: check the header parse instead of using indeterminate sizes.
    if(fscanf(fp, "%d %d", &col, &row) != 2){
        LOGE("%s bad header!", file.data());
        state = false;
        fclose(fp);
        return MatrixXd::Zero(1,1);
    }
    MatrixXd mat(row, col);
    for(int c=0; c<col; c++){
        for(int r=0; r<row; r++){
            fscanf(fp, "%lf", &mat(r, c));
        }
    }
    fclose(fp); // BUG FIX: the stream was previously leaked
    return mat;
}
// Load a fully-connected bias vector: header "rows", then rows doubles.
// On failure `state` is cleared and a 1-element zero vector is returned.
VectorXd Classifier::load_fc_b(const string &file){
    FILE *fp = fopen(file.data(), "r");
    if(fp == nullptr){
        LOGE("%s open fail!", file.data());
        state = false;
        return VectorXd::Zero(1,1);
    }
    int row;
    // BUG FIX: check the header parse instead of using an indeterminate size.
    if(fscanf(fp, "%d", &row) != 1){
        LOGE("%s bad header!", file.data());
        state = false;
        fclose(fp);
        return VectorXd::Zero(1,1);
    }
    VectorXd vec(row, 1);
    for(int r=0; r<row; r++){
        fscanf(fp, "%lf", &vec(r));
    }
    fclose(fp); // BUG FIX: the stream was previously leaked
    return vec;
}
// Numerically-stable softmax over all coefficients: shift by the max, then
// normalize. The exponentials are computed once instead of twice as before.
MatrixXd Classifier::softmax(const MatrixXd &input){
    MatrixXd shifted = input.array() - input.maxCoeff();
    MatrixXd e = shifted.array().exp();
    return e / e.sum();
}
// Max pooling with a size x size window and stride `size`; trailing
// rows/cols that do not fill a full window are dropped.
vector<vector<MatrixXd>> Classifier::max_pool(const vector<vector<MatrixXd>> &input, int size){
    vector<vector<MatrixXd>> output;
    for(int sample=0; sample<input.size(); sample++) {
        vector<MatrixXd> sub;
        for (int channel = 0; channel < input[0].size(); channel++) {
            MatrixXd tmp(input[0][0].rows() / size, input[0][0].cols() / size);
            for (int row = 0; row < input[0][0].rows() / size; row++) {
                for (int col = 0; col < input[0][0].cols() / size; col++) {
                    // BUG FIX: seed the running max with the first window
                    // element instead of 0, so all-negative windows pool
                    // correctly (the old code clamped them to 0).
                    double max = input[sample][channel](row * size, col * size);
                    for (int x = 0; x < size; x++) {
                        for (int y = 0; y < size; y++) {
                            if(max < input[sample][channel](row * size + x, col * size + y)){
                                max = input[sample][channel](row * size + x, col * size + y);
                            }
                        }
                    }
                    tmp(row, col) = max;
                }
            }
            sub.emplace_back(tmp);
        }
        output.emplace_back(sub);
    }
    return output;
}
// Average pooling with a size x size window and stride `size`; trailing
// rows/cols that do not fill a full window are dropped.
vector<vector<MatrixXd>> Classifier::mean_pool(const vector<vector<MatrixXd>> &input, int size){
    vector<vector<MatrixXd>> output;
    for(int sample=0; sample<input.size(); sample++) {
        vector<MatrixXd> sub;
        for (int channel = 0; channel < input[0].size(); channel++) {
            MatrixXd tmp(input[0][0].rows() / size, input[0][0].cols() / size);
            for (int row = 0; row < input[0][0].rows() / size; row++) {
                for (int col = 0; col < input[0][0].cols() / size; col++) {
                    double val = 0;
                    for (int x = 0; x < size; x++) {
                        for (int y = 0; y < size; y++) {
                            val += input[sample][channel](row * size + x, col * size + y);
                        }
                    }
                    tmp(row, col) = val / size / size; // window mean
                }
            }
            sub.emplace_back(tmp);
        }
        output.emplace_back(sub);
    }
    return output;
}
// Add a per-channel scalar bias to every feature map.
vector<vector<MatrixXd>> Classifier::apply_bias(const vector<vector<MatrixXd>> &input, const vector<double> &bias){
    assert(input[0].size()==bias.size());
    vector<vector<MatrixXd>> result;
    result.reserve(input.size());
    for(const auto &sample : input){
        vector<MatrixXd> sub;
        sub.reserve(bias.size());
        for(size_t ch = 0; ch < bias.size(); ch++){
            MatrixXd biased = sample[ch].array() + bias[ch];
            sub.emplace_back(std::move(biased));
        }
        result.emplace_back(std::move(sub));
    }
    return result;
}
// Element-wise ReLU: negative coefficients become zero.
MatrixXd Classifier::relu(const MatrixXd &input){
    auto rectify = [](double v) -> double { return v > 0 ? v : 0.0; };
    return input.unaryExpr(rectify);
}
// Element-wise leaky ReLU: negative coefficients are scaled by alpha.
MatrixXd Classifier::leaky_relu(const MatrixXd &input, float alpha){
    auto slope = [alpha](double v) -> double { return v > 0 ? v : alpha * v; };
    return input.unaryExpr(slope);
}
// ReLU applied to every channel of every sample.
vector<vector<MatrixXd>> Classifier::relu(const vector<vector<MatrixXd>> &input){
    vector<vector<MatrixXd>> result;
    result.reserve(input.size());
    for(const auto &sample : input){
        vector<MatrixXd> sub;
        sub.reserve(sample.size());
        for(const auto &channel : sample){
            sub.emplace_back(relu(channel));
        }
        result.emplace_back(std::move(sub));
    }
    return result;
}
// Leaky ReLU applied to every channel of every sample.
vector<vector<MatrixXd>> Classifier::leaky_relu(const vector<vector<MatrixXd>> &input, float alpha){
    vector<vector<MatrixXd>> result;
    result.reserve(input.size());
    for(const auto &sample : input){
        vector<MatrixXd> sub;
        sub.reserve(sample.size());
        for(const auto &channel : sample){
            sub.emplace_back(leaky_relu(channel, alpha));
        }
        result.emplace_back(std::move(sub));
    }
    return result;
}
// Zero-padding ("pand" is presumably a typo for "pad"): adds `val` rows/cols
// of zeros on every border of every feature map.
vector<vector<MatrixXd>> Classifier::pand(const vector<vector<MatrixXd>> &input, int val){
    vector<vector<MatrixXd>> result;
    for(int sample=0; sample<input.size(); sample++){
        vector<MatrixXd> sub;
        for(int channels=0; channels<input[0].size(); channels++){
            MatrixXd mat = MatrixXd::Zero(input[0][0].rows()+2*val, input[0][0].cols()+2*val);
            mat.block(val, val, input[0][0].rows(), input[0][0].cols()) = input[sample][channels];
            sub.emplace_back(mat);
        }
        result.emplace_back(sub);
    }
    return result;
}
// Valid-mode 2D cross-correlation of `input` with `filter`.
MatrixXd Classifier::conv(const MatrixXd &filter, const MatrixXd &input){
    int result_rows = input.rows()-filter.rows()+1;
    int result_cols = input.cols()-filter.cols()+1;
    MatrixXd result(result_rows, result_cols);
    for(int row=0; row<result_rows; row++){
        for(int col=0; col<result_cols; col++){
            double val=0;
            // BUG FIX: the x loop iterated filter.cols() instead of
            // filter.rows(), which was only correct for square kernels and
            // read out of bounds / dropped rows otherwise.
            for(int x=0; x<filter.rows(); x++){
                for(int y=0; y<filter.cols(); y++){
                    val += input(row+x, col+y) * filter(x,y);
                }
            }
            result(row, col) = val;
        }
    }
    return result;
}
// Multi-channel convolution layer: filter[in][out] applied to
// input[sample][in], summed over input channels.
vector<vector<MatrixXd>> Classifier::conv2(const vector<vector<MatrixXd>> &filter, const vector<vector<MatrixXd>> &input){
    if(filter.size() != input[0].size()){
        // BUG FIX: size() is size_t; passing it to a %d varargs slot is
        // undefined behavior — cast explicitly (message typo fixed too).
        LOGE("shape do not match, which is filter.size=%d, input[0].size()=%d",
             static_cast<int>(filter.size()), static_cast<int>(input[0].size()));
        exit(-1);
    }
    vector<vector<MatrixXd>> result;
    int result_rows = input[0][0].rows()-filter[0][0].rows()+1;
    int result_cols = input[0][0].cols()-filter[0][0].cols()+1;
    for(int col=0; col<input.size(); col++){         // col: sample index
        vector<MatrixXd> sub;
        for(int row=0; row<filter[0].size(); row++){ // row: output channel
            MatrixXd val = MatrixXd::Zero(result_rows, result_cols);
            for(int x=0; x<filter.size(); x++){      // x: input channel
                val += conv(filter[x][row], input[col][x]);
            }
            sub.emplace_back(val);
        }
        result.emplace_back(sub);
    }
    return result;
}
// Flatten [sample][channel] feature maps into a (channels*rows*cols) x samples
// matrix. The channel index varies fastest (row, col, then channel), which
// must match the ordering used when the FC weights were exported.
MatrixXd Classifier::flatten(const vector<vector<MatrixXd>> &input){
    int ones = input[0][0].rows()*input[0][0].cols();
    int channels = input[0].size();
    int samples = input.size();
    int row = input[0][0].rows();
    int col = input[0][0].cols();
    MatrixXd output(channels*ones,samples);
    for(int s=0; s<samples; s++) {
        for(int r=0, cnt=0; r<row; r++){
            for(int c=0; c<col; c++){
                for (int i = 0; i < channels; i++) {
                    output(cnt++, s) = input[s][i](r, c);
                }
            }
        }
    }
    return output;
}
// Load all network parameters from `folder`. Each loader clears `state` on
// failure, so `state` ends up true only if every file loaded successfully.
Classifier::Classifier(const string &folder) : state(true){
    conv1_w = load_conv_w(folder+"conv1_w");
    conv1_b = load_conv_b(folder+"conv1_b");
    conv2_w = load_conv_w(folder+"conv2_w");
    conv2_b = load_conv_b(folder+"conv2_b");
    conv3_w = load_conv_w(folder+"conv3_w");
    conv3_b = load_conv_b(folder+"conv3_b");
    fc1_w = load_fc_w(folder+"fc1_w");
    fc1_b = load_fc_b(folder+"fc1_b");
    fc2_w = load_fc_w(folder+"fc2_w");
    fc2_b = load_fc_b(folder+"fc2_b");
    if(state){
        LOGM("Load para success!");
    }
}
//#define PRINT_MAT(name) (cout << #name":\n" << name << endl)
//#define PRINT_MULTI_MAT(name) (cout << #name":\n" << name[0][0] << endl)
//#define PRINT_MAT_SHAPE(name) LOGM(#name":(%d, %d)", name.rows(), name.cols())
//#define PRINT_MULTI_MAT_SHAPE(name) LOGM(#name":(%d, %d)", name[0][0].rows(), name[0][0].cols())
// Forward pass: three conv+bias+ReLU stages (mean-pool after the first two),
// then two fully-connected layers with a softmax output.
// @param input input[sample][channel] image planes
// @return class probabilities, one column per sample
MatrixXd Classifier::calculate(const vector<vector<MatrixXd>> &input) {
    vector<vector<MatrixXd>> conv1_result = relu(apply_bias(conv2(conv1_w, input), conv1_b));
    vector<vector<MatrixXd>> pool1_result = mean_pool(conv1_result, 2);
    vector<vector<MatrixXd>> conv2_result = relu(apply_bias(conv2(conv2_w, pool1_result), conv2_b));
    vector<vector<MatrixXd>> pool2_result = mean_pool(conv2_result, 2);
    vector<vector<MatrixXd>> conv3_result = relu(apply_bias(conv2(conv3_w, pool2_result), conv3_b));
    MatrixXd flattened = flatten(conv3_result);
    MatrixXd y1 = fc1_w * flattened;
    y1.colwise() += fc1_b;
    MatrixXd fc1 = relu(y1);
    MatrixXd y2 = fc2_w * fc1;
    y2.colwise() += fc2_b;
    MatrixXd fc2 = softmax(y2);
    return fc2;
}
// True when every parameter file was loaded successfully.
Classifier::operator bool() const {
    return state;
}
// Classify one 3-channel image: returns the predicted class index when the
// top probability exceeds 0.5, otherwise 0 (rejected).
int Classifier::operator()(const cv::Mat &image) {
    std::vector<cv::Mat> channels;
    cv::split(image, channels);
    MatrixXd b, g, r;
    cv2eigen(channels[0], b);
    cv2eigen(channels[1], g);
    cv2eigen(channels[2], r);
    // normalize to [0, 1]
    b /= 255;
    g /= 255;
    r /= 255;
    vector<vector<MatrixXd>> in = {{b, g, r}};
    MatrixXd scores = calculate(in);
    MatrixXd::Index best_row, best_col;
    scores.maxCoeff(&best_row, &best_col);
    return scores(best_row, best_col) > 0.50 ? best_row : 0;
}

View File

@@ -0,0 +1,295 @@
#include "solver.h"
#include <yaml-cpp/yaml.h>
#include <vector>
#include "tools/logger.hpp"
#include "tools/math_tools.hpp"
namespace auto_aim
{
// Physical armor-plate model used as PnP object points.
// Armor frame convention (from the point layout below): x = outward normal,
// y = left along the plate, z = up along the lightbars.
constexpr double LIGHTBAR_LENGTH = 56e-3;    // m
constexpr double BIG_ARMOR_WIDTH = 230e-3;   // m
constexpr double SMALL_ARMOR_WIDTH = 135e-3; // m
// Corner order: top-left, top-right, bottom-right, bottom-left
// (matches the image-point order produced by the detector).
const std::vector<cv::Point3f> BIG_ARMOR_POINTS{
  {0, BIG_ARMOR_WIDTH / 2, LIGHTBAR_LENGTH / 2},
  {0, -BIG_ARMOR_WIDTH / 2, LIGHTBAR_LENGTH / 2},
  {0, -BIG_ARMOR_WIDTH / 2, -LIGHTBAR_LENGTH / 2},
  {0, BIG_ARMOR_WIDTH / 2, -LIGHTBAR_LENGTH / 2}};
const std::vector<cv::Point3f> SMALL_ARMOR_POINTS{
  {0, SMALL_ARMOR_WIDTH / 2, LIGHTBAR_LENGTH / 2},
  {0, -SMALL_ARMOR_WIDTH / 2, LIGHTBAR_LENGTH / 2},
  {0, -SMALL_ARMOR_WIDTH / 2, -LIGHTBAR_LENGTH / 2},
  {0, SMALL_ARMOR_WIDTH / 2, -LIGHTBAR_LENGTH / 2}};
// Load camera intrinsics (and, eventually, IMU extrinsics) from a YAML config.
// config_path: YAML file with at least "camera_matrix" (3x3 row-major) and
// "distort_coeffs" (1x5).
Solver::Solver(const std::string & config_path) : R_gimbal2world_(Eigen::Matrix3d::Identity())
{
  auto yaml = YAML::LoadFile(config_path);
  // The IMU extrinsic loading below is currently disabled (the quaternion
  // feed from the IMU is not wired up yet). Without the explicit defaults
  // that follow, these Eigen members would be left UNINITIALIZED and every
  // pose computed by this solver would be garbage. Identity / zero keeps the
  // solver consistent (camera ~ gimbal frame) until the IMU path is enabled.
  R_gimbal2imubody_ = Eigen::Matrix3d::Identity();
  R_camera2gimbal_ = Eigen::Matrix3d::Identity();
  t_camera2gimbal_ = Eigen::Vector3d::Zero();
  /*
  auto R_gimbal2imubody_data = yaml["R_gimbal2imubody"].as<std::vector<double>>();
  auto R_camera2gimbal_data = yaml["R_camera2gimbal"].as<std::vector<double>>();
  auto t_camera2gimbal_data = yaml["t_camera2gimbal"].as<std::vector<double>>();
  R_gimbal2imubody_ = Eigen::Matrix<double, 3, 3, Eigen::RowMajor>(R_gimbal2imubody_data.data());
  R_camera2gimbal_ = Eigen::Matrix<double, 3, 3, Eigen::RowMajor>(R_camera2gimbal_data.data())
  t_camera2gimbal_ = Eigen::Matrix<double, 3, 1>(t_camera2gimbal_data.data());
  */
  auto camera_matrix_data = yaml["camera_matrix"].as<std::vector<double>>();
  auto distort_coeffs_data = yaml["distort_coeffs"].as<std::vector<double>>();
  Eigen::Matrix<double, 3, 3, Eigen::RowMajor> camera_matrix(camera_matrix_data.data());
  Eigen::Matrix<double, 1, 5> distort_coeffs(distort_coeffs_data.data());
  cv::eigen2cv(camera_matrix, camera_matrix_);
  cv::eigen2cv(distort_coeffs, distort_coeffs_);
}
Eigen::Matrix3d Solver::R_gimbal2world() const { return R_gimbal2world_; }
// Update the gimbal-to-world rotation from an IMU quaternion (body -> abs).
// The similarity transform moves the IMU rotation into the gimbal frame.
void Solver::set_R_gimbal2world(const Eigen::Quaterniond & q)
{
  Eigen::Matrix3d R_imubody2imuabs = q.toRotationMatrix();
  R_gimbal2world_ = R_gimbal2imubody_.transpose() * R_imubody2imuabs * R_gimbal2imubody_;
}
// Full PnP pose estimation for one detected armor plate.
// Fills the armor's position (camera -> gimbal -> world chain) and its
// orientation in gimbal / world frames, then refines yaw via reprojection
// search. Yaw optimization is skipped for balance infantry, whose fixed
// 15-degree pitch assumption (see reproject_armor) does not hold.
void Solver::solve(Armor & armor) const
{
  // 3D model matching the plate size class.
  const auto & object_points =
    (armor.type == ArmorType::big) ? BIG_ARMOR_POINTS : SMALL_ARMOR_POINTS;
  cv::Vec3d rvec, tvec;
  // IPPE exploits the planar 4-point layout of the armor corners.
  cv::solvePnP(
    object_points, armor.points, camera_matrix_, distort_coeffs_, rvec, tvec, false,
    cv::SOLVEPNP_IPPE);
  Eigen::Vector3d xyz_in_camera;
  cv::cv2eigen(tvec, xyz_in_camera);
  armor.xyz_in_gimbal = R_camera2gimbal_ * xyz_in_camera + t_camera2gimbal_;
  armor.xyz_in_world = R_gimbal2world_ * armor.xyz_in_gimbal;
  cv::Mat rmat;
  cv::Rodrigues(rvec, rmat);
  Eigen::Matrix3d R_armor2camera;
  cv::cv2eigen(rmat, R_armor2camera);
  Eigen::Matrix3d R_armor2gimbal = R_camera2gimbal_ * R_armor2camera;
  Eigen::Matrix3d R_armor2world = R_gimbal2world_ * R_armor2gimbal;
  // Euler extraction in axis order 2,1,0 (yaw, pitch, roll).
  armor.ypr_in_gimbal = tools::eulers(R_armor2gimbal, 2, 1, 0);
  armor.ypr_in_world = tools::eulers(R_armor2world, 2, 1, 0);
  armor.ypd_in_world = tools::xyz2ypd(armor.xyz_in_world);
  // Balance robots: no yaw optimization, since the pitch assumption fails.
  auto is_balance = (armor.type == ArmorType::big) &&
                    (armor.name == ArmorName::three || armor.name == ArmorName::four ||
                     armor.name == ArmorName::five);
  if (is_balance) return;
  optimize_yaw(armor);
}
// Project a hypothetical armor plate (at world position xyz_in_world with the
// given yaw) back into the image, for reprojection-error evaluation.
// The plate pitch is fixed by rule: outposts tilt -15 degrees, everything
// else +15 degrees.
std::vector<cv::Point2f> Solver::reproject_armor(
  const Eigen::Vector3d & xyz_in_world, double yaw, ArmorType type, ArmorName name) const
{
  auto sin_yaw = std::sin(yaw);
  auto cos_yaw = std::cos(yaw);
  auto pitch = (name == ArmorName::outpost) ? -15.0 * CV_PI / 180.0 : 15.0 * CV_PI / 180.0;
  auto sin_pitch = std::sin(pitch);
  auto cos_pitch = std::cos(pitch);
  // Armor orientation in the world frame built from (yaw, pitch), roll = 0.
  // clang-format off
  const Eigen::Matrix3d R_armor2world {
    {cos_yaw * cos_pitch, -sin_yaw, cos_yaw * sin_pitch},
    {sin_yaw * cos_pitch,  cos_yaw, sin_yaw * sin_pitch},
    {          -sin_pitch,       0,           cos_pitch}
  };
  // clang-format on
  // Transform armor pose into the camera frame: world -> gimbal -> camera.
  const Eigen::Vector3d & t_armor2world = xyz_in_world;
  Eigen::Matrix3d R_armor2camera =
    R_camera2gimbal_.transpose() * R_gimbal2world_.transpose() * R_armor2world;
  Eigen::Vector3d t_armor2camera =
    R_camera2gimbal_.transpose() * (R_gimbal2world_.transpose() * t_armor2world - t_camera2gimbal_);
  // Convert the rotation to a Rodrigues vector for projectPoints.
  cv::Vec3d rvec;
  cv::Mat R_armor2camera_cv;
  cv::eigen2cv(R_armor2camera, R_armor2camera_cv);
  cv::Rodrigues(R_armor2camera_cv, rvec);
  cv::Vec3d tvec(t_armor2camera[0], t_armor2camera[1], t_armor2camera[2]);
  // Reproject the 3D armor model into pixel coordinates.
  std::vector<cv::Point2f> image_points;
  const auto & object_points = (type == ArmorType::big) ? BIG_ARMOR_POINTS : SMALL_ARMOR_POINTS;
  cv::projectPoints(object_points, rvec, tvec, camera_matrix_, distort_coeffs_, image_points);
  return image_points;
}
// Reprojection error of an armor under a *hypothesized* pitch angle, used to
// probe outpost plate inclination. Re-runs PnP on the armor (the parameter is
// taken by value and mutated locally), then reprojects the armor model with
// the solved yaw but the caller-supplied pitch, and sums the per-corner pixel
// distances.
// NOTE(review): the name "oupost" is a typo for "outpost"; kept because the
// declaration lives in the header. Much of this body duplicates solve() and
// reproject_armor() -- candidates for a shared private helper.
double Solver::oupost_reprojection_error(Armor armor, const double & pitch)
{
  // --- Same PnP pose recovery as Solver::solve() ---
  const auto & object_points =
    (armor.type == ArmorType::big) ? BIG_ARMOR_POINTS : SMALL_ARMOR_POINTS;
  cv::Vec3d rvec, tvec;
  cv::solvePnP(
    object_points, armor.points, camera_matrix_, distort_coeffs_, rvec, tvec, false,
    cv::SOLVEPNP_IPPE);
  Eigen::Vector3d xyz_in_camera;
  cv::cv2eigen(tvec, xyz_in_camera);
  armor.xyz_in_gimbal = R_camera2gimbal_ * xyz_in_camera + t_camera2gimbal_;
  armor.xyz_in_world = R_gimbal2world_ * armor.xyz_in_gimbal;
  cv::Mat rmat;
  cv::Rodrigues(rvec, rmat);
  Eigen::Matrix3d R_armor2camera;
  cv::cv2eigen(rmat, R_armor2camera);
  Eigen::Matrix3d R_armor2gimbal = R_camera2gimbal_ * R_armor2camera;
  Eigen::Matrix3d R_armor2world = R_gimbal2world_ * R_armor2gimbal;
  armor.ypr_in_gimbal = tools::eulers(R_armor2gimbal, 2, 1, 0);
  armor.ypr_in_world = tools::eulers(R_armor2world, 2, 1, 0);
  armor.ypd_in_world = tools::xyz2ypd(armor.xyz_in_world);
  // --- Rebuild the plate orientation with the solved yaw + candidate pitch ---
  auto yaw = armor.ypr_in_world[0];
  auto xyz_in_world = armor.xyz_in_world;
  auto sin_yaw = std::sin(yaw);
  auto cos_yaw = std::cos(yaw);
  auto sin_pitch = std::sin(pitch);
  auto cos_pitch = std::cos(pitch);
  // clang-format off
  const Eigen::Matrix3d _R_armor2world {
    {cos_yaw * cos_pitch, -sin_yaw, cos_yaw * sin_pitch},
    {sin_yaw * cos_pitch,  cos_yaw, sin_yaw * sin_pitch},
    {          -sin_pitch,       0,           cos_pitch}
  };
  // clang-format on
  // world -> gimbal -> camera, then project (same math as reproject_armor).
  const Eigen::Vector3d & t_armor2world = xyz_in_world;
  Eigen::Matrix3d _R_armor2camera =
    R_camera2gimbal_.transpose() * R_gimbal2world_.transpose() * _R_armor2world;
  Eigen::Vector3d t_armor2camera =
    R_camera2gimbal_.transpose() * (R_gimbal2world_.transpose() * t_armor2world - t_camera2gimbal_);
  cv::Vec3d _rvec;
  cv::Mat R_armor2camera_cv;
  cv::eigen2cv(_R_armor2camera, R_armor2camera_cv);
  cv::Rodrigues(R_armor2camera_cv, _rvec);
  cv::Vec3d _tvec(t_armor2camera[0], t_armor2camera[1], t_armor2camera[2]);
  // Sum of per-corner pixel distances between observation and reprojection.
  std::vector<cv::Point2f> image_points;
  cv::projectPoints(object_points, _rvec, _tvec, camera_matrix_, distort_coeffs_, image_points);
  auto error = 0.0;
  for (int i = 0; i < 4; i++) error += cv::norm(armor.points[i] - image_points[i]);
  return error;
}
// Refine the armor's world yaw by brute-force search: try 140 one-degree
// steps centered on the current gimbal yaw, keep the yaw whose reprojection
// error is smallest. The PnP yaw is preserved in armor.yaw_raw.
void Solver::optimize_yaw(Armor & armor) const
{
  Eigen::Vector3d gimbal_ypr = tools::eulers(R_gimbal2world_, 2, 1, 0);
  constexpr double SEARCH_RANGE = 140;  // degree
  // Search window start: gimbal yaw minus half the range, wrapped to [-pi, pi].
  auto yaw0 = tools::limit_rad(gimbal_ypr[0] - SEARCH_RANGE / 2 * CV_PI / 180.0);
  auto min_error = 1e10;
  auto best_yaw = armor.ypr_in_world[0];
  for (int i = 0; i < SEARCH_RANGE; i++) {
    double yaw = tools::limit_rad(yaw0 + i * CV_PI / 180.0);
    // The third argument (offset from window center) biases the cost model;
    // currently unused by armor_reprojection_error's active branch.
    auto error = armor_reprojection_error(armor, yaw, (i - SEARCH_RANGE / 2) * CV_PI / 180.0);
    if (error < min_error) {
      min_error = error;
      best_yaw = yaw;
    }
  }
  armor.yaw_raw = armor.ypr_in_world[0];
  armor.ypr_in_world[0] = best_yaw;
}
// SJTU-style reprojection cost: compares each edge of the reference polygon
// (cv_refs) with the corresponding edge of the observed polygon (cv_pts),
// blending a pixel-distance term and an edge-angle term weighted by
// `inclined` (the larger the pixel error, the more the slope is trusted).
double Solver::SJTU_cost(
  const std::vector<cv::Point2f> & cv_refs, const std::vector<cv::Point2f> & cv_pts,
  const double & inclined) const
{
  std::size_t size = cv_refs.size();
  std::vector<Eigen::Vector2d> refs;
  std::vector<Eigen::Vector2d> pts;
  for (std::size_t i = 0u; i < size; ++i) {
    refs.emplace_back(cv_refs[i].x, cv_refs[i].y);
    pts.emplace_back(cv_pts[i].x, cv_pts[i].y);
  }
  double cost = 0.;
  for (std::size_t i = 0u; i < size; ++i) {
    std::size_t p = (i + 1u) % size;
    // Edge i -> p. Conceptually: move the start point, fix the length, rotate.
    Eigen::Vector2d ref_d = refs[p] - refs[i];  // reference edge
    Eigen::Vector2d pt_d = pts[p] - pts[i];
    // Pixel term: average endpoint distance plus length mismatch, normalized
    // by the reference edge length (should dominate near inclined == 90 deg).
    double pixel_dis =
      (0.5 * ((refs[i] - pts[i]).norm() + (refs[p] - pts[p]).norm()) +
       std::fabs(ref_d.norm() - pt_d.norm())) /
      ref_d.norm();
    // Angular term between the two edges.
    // NOTE(review): ref_d.norm() is multiplied and divided here, so this is
    // just get_abs_angle(ref_d, pt_d) -- confirm whether a different
    // normalization was intended.
    double angular_dis = ref_d.norm() * tools::get_abs_angle(ref_d, pt_d) / ref_d.norm();
    // Blend the two terms via sin/cos of `inclined`; the angular term carries
    // an extra factor of 2 (detector error-by-slope weighting).
    double cost_i =
      tools::square(pixel_dis * std::sin(inclined)) +
      tools::square(angular_dis * std::cos(inclined)) * 2.0;  // DETECTOR_ERROR_PIXEL_BY_SLOPE
    cost += std::sqrt(cost_i);
  }
  return cost;
}
// Sum of per-corner pixel distances between the observed armor corners and
// the corners reprojected under a hypothesized yaw. Lower is better; used by
// optimize_yaw() to rank yaw candidates. `inclined` only feeds the disabled
// SJTU_cost variant.
double Solver::armor_reprojection_error(
  const Armor & armor, double yaw, const double & inclined) const
{
  const auto reprojected = reproject_armor(armor.xyz_in_world, yaw, armor.type, armor.name);
  double total_error = 0.0;
  for (int i = 0; i < 4; i++) total_error += cv::norm(armor.points[i] - reprojected[i]);
  // auto total_error = SJTU_cost(reprojected, armor.points, inclined);
  return total_error;
}
// Convert world-frame 3D points to pixel coordinates.
// Points that fall behind the camera (z <= 0 in camera frame) are dropped
// before projection.
// NOTE(review): when points are dropped, the returned vector no longer
// corresponds index-for-index with the input -- verify callers do not rely
// on 1:1 correspondence.
std::vector<cv::Point2f> Solver::world2pixel(const std::vector<cv::Point3f> & worldPoints)
{
  // world -> camera extrinsics derived from the stored gimbal chain.
  Eigen::Matrix3d R_world2camera = R_camera2gimbal_.transpose() * R_gimbal2world_.transpose();
  Eigen::Vector3d t_world2camera = -R_camera2gimbal_.transpose() * t_camera2gimbal_;
  cv::Mat rvec;
  cv::Mat tvec;
  cv::eigen2cv(R_world2camera, rvec);
  cv::eigen2cv(t_world2camera, tvec);
  // Keep only points in front of the camera.
  std::vector<cv::Point3f> valid_world_points;
  for (const auto & world_point : worldPoints) {
    Eigen::Vector3d world_point_eigen(world_point.x, world_point.y, world_point.z);
    Eigen::Vector3d camera_point = R_world2camera * world_point_eigen + t_world2camera;
    if (camera_point.z() > 0) {
      valid_world_points.push_back(world_point);
    }
  }
  // Nothing visible: return an empty vector.
  if (valid_world_points.empty()) {
    return std::vector<cv::Point2f>();
  }
  std::vector<cv::Point2f> pixelPoints;
  cv::projectPoints(valid_world_points, rvec, tvec, camera_matrix_, distort_coeffs_, pixelPoints);
  return pixelPoints;
}
} // namespace auto_aim

View File

@@ -0,0 +1,212 @@
//
// Created by xinyang on 19-7-18.
//
#include <armor_finder/armor_finder.h>
#include <show_images/show_images.h>
#include <options.h>
#include <opencv2/highgui.hpp>
#define DO_NOT_CNT_TIME
#include <log.h>
// 判断两个灯条的角度差
static bool angelJudge(const LightBlob &light_blob_i, const LightBlob &light_blob_j) {
float angle_i = light_blob_i.rect.size.width > light_blob_i.rect.size.height ? light_blob_i.rect.angle :
light_blob_i.rect.angle - 90;
float angle_j = light_blob_j.rect.size.width > light_blob_j.rect.size.height ? light_blob_j.rect.angle :
light_blob_j.rect.angle - 90;
return abs(angle_i - angle_j) < 20;
}
// 判断两个灯条的高度差
static bool heightJudge(const LightBlob &light_blob_i, const LightBlob &light_blob_j) {
cv::Point2f centers = light_blob_i.rect.center - light_blob_j.rect.center;
return abs(centers.y) < 30;
}
// 判断两个灯条的间距
static bool lengthJudge(const LightBlob &light_blob_i, const LightBlob &light_blob_j) {
double side_length;
cv::Point2f centers = light_blob_i.rect.center - light_blob_j.rect.center;
side_length = sqrt(centers.ddot(centers));
return (side_length / light_blob_i.length < 10 && side_length / light_blob_i.length > 0.5);
}
// Length-ratio gate: the two bars' lengths must be within a factor of ~2.5.
static bool lengthRatioJudge(const LightBlob &light_blob_i, const LightBlob &light_blob_j) {
    double ratio = light_blob_i.length / light_blob_j.length;
    return ratio > 0.4 && ratio < 2.5;
}
/* Misalignment ("CuoWeiDu") gate: measures how far one bar's center is
 * displaced from the other along their common long-axis direction. Two bars
 * of the same armor plate should sit side by side, so the projection of the
 * center-to-center vector onto the bar direction must be small (< 25 px). */
static bool CuoWeiDuJudge(const LightBlob &light_blob_i, const LightBlob &light_blob_j) {
    // Normalize each bar's angle to its long-axis tilt.
    float angle_i = light_blob_i.rect.size.width > light_blob_i.rect.size.height ? light_blob_i.rect.angle :
                    light_blob_i.rect.angle - 90;
    float angle_j = light_blob_j.rect.size.width > light_blob_j.rect.size.height ? light_blob_j.rect.angle :
                    light_blob_j.rect.angle - 90;
    // Average direction of the pair, in radians; if the two angles straddle
    // the wrap-around (difference > 90 deg) rotate the mean by 90 deg.
    float angle = (angle_i + angle_j) / 2.0 / 180.0 * 3.14159265459;
    if (abs(angle_i - angle_j) > 90) {
        angle += 3.14159265459 / 2;
    }
    Vector2f orientation(cos(angle), sin(angle));
    Vector2f p2p(light_blob_j.rect.center.x - light_blob_i.rect.center.x,
                 light_blob_j.rect.center.y - light_blob_i.rect.center.y);
    // Component of the center offset along the bar direction.
    return abs(orientation.dot(p2p)) < 25;
}
// 判断装甲板方向
static bool boxAngleJudge(const LightBlob &light_blob_i, const LightBlob &light_blob_j) {
float angle_i = light_blob_i.rect.size.width > light_blob_i.rect.size.height ? light_blob_i.rect.angle :
light_blob_i.rect.angle - 90;
float angle_j = light_blob_j.rect.size.width > light_blob_j.rect.size.height ? light_blob_j.rect.angle :
light_blob_j.rect.angle - 90;
float angle = (angle_i + angle_j) / 2.0;
if (abs(angle_i - angle_j) > 90) {
angle += 90.0;
}
return (-120.0 < angle && angle < -60.0) || (60.0 < angle && angle < 120.0);
}
// A candidate pair must be enemy-colored on both sides and pass every
// geometric gate (same short-circuit order as each check's cost suggests).
static bool isCoupleLight(const LightBlob &light_blob_i, const LightBlob &light_blob_j, uint8_t enemy_color) {
    if (light_blob_i.blob_color != enemy_color) return false;
    if (light_blob_j.blob_color != enemy_color) return false;
    // heightJudge(light_blob_i, light_blob_j) is intentionally disabled.
    return lengthRatioJudge(light_blob_i, light_blob_j) &&
           lengthJudge(light_blob_i, light_blob_j) &&
           angelJudge(light_blob_i, light_blob_j) &&
           boxAngleJudge(light_blob_i, light_blob_j) &&
           CuoWeiDuJudge(light_blob_i, light_blob_j);
}
// Pair light bars into armor-plate candidate boxes and compute the four
// corner points each candidate needs for PnP.
// Returns true when at least one candidate was produced.
bool ArmorFinder::matchArmorBoxes(const cv::Mat &src, const LightBlobs &light_blobs, ArmorBoxes &armor_boxes) {
    armor_boxes.clear();
    // Guard: light_blobs.size() is unsigned, so with fewer than 2 blobs
    // `size() - 1` underflows to SIZE_MAX and the loop below would index
    // out of range.
    if (light_blobs.size() < 2) return false;
    for (int i = 0; i < light_blobs.size() - 1; ++i) {
        for (int j = i + 1; j < light_blobs.size(); ++j) {
            if (!isCoupleLight(light_blobs.at(i), light_blobs.at(j), enemy_color)) {
                continue;
            }
            // Candidate box: union of the two bar bounding rects, padded
            // horizontally by 4 px and vertically by half the mean bar height.
            cv::Rect2d rect_left = light_blobs.at(static_cast<unsigned long>(i)).rect.boundingRect();
            cv::Rect2d rect_right = light_blobs.at(static_cast<unsigned long>(j)).rect.boundingRect();
            double min_x, min_y, max_x, max_y;
            min_x = fmin(rect_left.x, rect_right.x) - 4;
            max_x = fmax(rect_left.x + rect_left.width, rect_right.x + rect_right.width) + 4;
            min_y = fmin(rect_left.y, rect_right.y) - 0.5 * (rect_left.height + rect_right.height) / 2.0;
            max_y = fmax(rect_left.y + rect_left.height, rect_right.y + rect_right.height) +
                    0.5 * (rect_left.height + rect_right.height) / 2.0;
            if (min_x < 0 || max_x > src.cols || min_y < 0 || max_y > src.rows) {
                continue;
            }
            // While searching, ignore boxes high in the frame.
            if (state == SEARCHING_STATE && (max_y + min_y) / 2 < 120) continue;
            // Armor plates are wider than tall.
            if ((max_x - min_x) / (max_y - min_y) < 0.8) continue;
            LightBlobs pair_blobs = {light_blobs.at(i), light_blobs.at(j)};
            // Compute the 4 corner points needed for PnP.
            const LightBlob &l1 = light_blobs.at(i);
            const LightBlob &l2 = light_blobs.at(j);
            // Order the pair left/right by center x.
            const LightBlob &left = (l1.rect.center.x < l2.rect.center.x) ? l1 : l2;
            const LightBlob &right = (l1.rect.center.x < l2.rect.center.x) ? l2 : l1;
            // Top/bottom endpoint of a bar = midpoint of its two upper /
            // two lower rotated-rect vertices (sorted by y).
            auto getTopBottom = [](const LightBlob &lb, cv::Point2f &top, cv::Point2f &bottom) {
                cv::Point2f pts[4];
                lb.rect.points(pts);
                std::sort(pts, pts + 4, [](const cv::Point2f &a, const cv::Point2f &b) { return a.y < b.y; });
                top = (pts[0] + pts[1]) / 2.0;
                bottom = (pts[2] + pts[3]) / 2.0;
            };
            cv::Point2f lt, lb, rt, rb;
            getTopBottom(left, lt, lb);
            getTopBottom(right, rt, rb);
            // PnP point order: top-left, top-right, bottom-right, bottom-left.
            std::vector<cv::Point2f> armor_points = {lt, rt, rb, lb};
            armor_boxes.emplace_back(
                cv::Rect2d(min_x, min_y, max_x - min_x, max_y - min_y),
                pair_blobs,
                enemy_color,
                0,
                armor_points
            );
        }
    }
    return !armor_boxes.empty();
}
// Search a frame for the best armor target.
// Pipeline: light-bar extraction -> pair matching -> (optional) CNN
// classification -> priority sort -> pick the first non-background box.
// Returns true and fills `box` on success; on failure `box` stays empty.
bool ArmorFinder::findArmorBox(const cv::Mat &src, ArmorBox &box) {
    LightBlobs light_blobs; // all candidate light bars
    ArmorBoxes armor_boxes; // armor candidate regions
    box.rect = cv::Rect2d(0, 0, 0, 0);
    box.id = -1;
    // Find all candidate light bars.
    CNT_TIME("blob", {
        if (!findLightBlobs(src, light_blobs)) {
            return false;
        }
    });
    if (show_light_blobs && state==SEARCHING_STATE) {
        showLightBlobs("light_blobs", src, light_blobs);
        cv::waitKey(1);
    }
    // Match bars into armor-plate candidate regions.
    CNT_TIME("boxes", {
        if (!matchArmorBoxes(src, light_blobs, armor_boxes)) {
            return false;
        }
    });
    if (show_armor_boxes && state==SEARCHING_STATE) {
        showArmorBoxes("boxes", src, armor_boxes);
        cv::waitKey(1);
    }
    // If the classifier is available, use it to label/filter the candidates.
    if (classifier) {
        CNT_TIME("classify: %d", {
            for (auto &armor_box : armor_boxes) {
                // Classifier input is a fixed 48x36 patch.
                cv::Mat roi = src(armor_box.rect).clone();
                cv::resize(roi, roi, cv::Size(48, 36));
                int c = classifier(roi);
                armor_box.id = c;
            }
        }, armor_boxes.size());
        // Sort candidates: nearest to last frame's target if one exists,
        // otherwise by the ArmorBox priority ordering (operator<).
        sort(armor_boxes.begin(), armor_boxes.end(), [&](const ArmorBox &a, const ArmorBox &b) {
            if (last_box.rect != cv::Rect2d()) {
                return getPointLength(a.getCenter() - last_box.getCenter()) <
                       getPointLength(b.getCenter() - last_box.getCenter());
            } else {
                return a < b;
            }
        });
        // First non-background (id != 0) candidate wins.
        for (auto &one_box : armor_boxes) {
            if (one_box.id != 0) {
                box = one_box;
                break;
            }
        }
        // Optionally dump labelled patches for offline training data.
        if (save_labelled_boxes) {
            for (const auto &one_box : armor_boxes) {
                char filename[100];
                // NOTE(review): time(nullptr) + clock() is time_t/clock_t
                // passed to %d -- confirm this matches int width on the
                // target platform (format mismatch otherwise).
                sprintf(filename, PROJECT_DIR"/armor_box_photo/%s_%d.jpg", id2name[one_box.id].data(),
                        time(nullptr) + clock());
                auto box_roi = src(one_box.rect);
                cv::resize(box_roi, box_roi, cv::Size(48, 36));
                cv::imwrite(filename, box_roi);
            }
        }
        if (box.rect == cv::Rect2d(0, 0, 0, 0)) {
            return false;
        }
        if (show_armor_boxes && state==SEARCHING_STATE) {
            showArmorBoxesClass("class", src, armor_boxes);
        }
    } else { // No classifier: take the first candidate (prone to false positives).
        box = armor_boxes[0];
    }
    return true;
}

View File

@@ -0,0 +1,152 @@
//
// Created by xinyang on 19-7-10.
//
#include <armor_finder/armor_finder.h>
#include <options.h>
#include <opencv2/highgui.hpp>
// Aspect ratio of a rotated rect (long side / short side), always >= 1.
static double lw_rate(const cv::RotatedRect &rect) {
    double longer = fmax(rect.size.height, rect.size.width);
    double shorter = fmin(rect.size.height, rect.size.width);
    return longer / shorter;
}
// Fraction of the minimum-area rect actually covered by the contour.
static double areaRatio(const std::vector<cv::Point> &contour, const cv::RotatedRect &rect) {
    double contour_area = cv::contourArea(contour);
    return contour_area / rect.size.area();
}
// Geometric filter for light-bar candidates: elongated (aspect in (1.2, 10))
// and sufficiently solid (contour fills its min-area rect), with a looser
// solidity bound for very small blobs.
static bool isValidLightBlob(const std::vector<cv::Point> &contour, const cv::RotatedRect &rect) {
    double aspect = lw_rate(rect);
    if (aspect <= 1.2 || aspect >= 10) return false;
    // (rect.size.area() < 3000) size cap remains disabled, as in the original.
    double solidity = areaRatio(contour, rect);
    if (rect.size.area() < 50) return solidity > 0.4;
    return solidity > 0.6;
}
// Decide whether a light bar is red or blue by comparing the summed red and
// blue channel intensities inside a slightly expanded ROI around the bar
// (the expansion captures the colored halo around the saturated core).
// Performance: the original walked every pixel with at<Vec3b>; cv::sum
// accumulates all channels in one optimized pass with the same comparison
// result (ties still resolve to BLOB_BLUE).
static uint8_t get_blob_color(const cv::Mat &src, const cv::RotatedRect &blobPos) {
    auto region = blobPos.boundingRect();
    // Grow the ROI by ~10% horizontally and ~5% vertically (at least 3 px).
    region.x -= fmax(3, region.width * 0.1);
    region.y -= fmax(3, region.height * 0.05);
    region.width += 2 * fmax(3, region.width * 0.1);
    region.height += 2 * fmax(3, region.height * 0.05);
    region &= cv::Rect(0, 0, src.cols, src.rows);
    // Per-channel sums over the ROI; BGR order: [0] = blue, [2] = red.
    cv::Scalar channel_sums = cv::sum(src(region));
    return (channel_sums[2] > channel_sums[0]) ? BLOB_RED : BLOB_BLUE;
}
// 判断两个灯条区域是同一个灯条
static bool isSameBlob(LightBlob blob1, LightBlob blob2) {
auto dist = blob1.rect.center - blob2.rect.center;
return (dist.x * dist.x + dist.y * dist.y) < 9;
}
// Morphological cleanup on the binary mask: erode, dilate twice, erode --
// roughly an open followed by a close, bridging small gaps in the bars.
// All four original kernels were identical 3x5 rectangles, so one shared
// kernel is used for every step.
static void imagePreProcess(cv::Mat &src) {
    static const cv::Mat kernel = getStructuringElement(cv::MORPH_RECT, cv::Size(3, 5));
    erode(src, src, kernel);
    dilate(src, src, kernel);
    dilate(src, src, kernel);
    erode(src, src, kernel);
}
// Extract all candidate light bars from a frame.
// Two binarization thresholds (bright + dim) are run in parallel to reduce
// sensitivity to ambient lighting; duplicate detections between the two
// passes are resolved by keeping the blob with the better fill ratio.
// Returns true when at least two bars were found (minimum for a pair).
bool ArmorFinder::findLightBlobs(const cv::Mat &src, LightBlobs &light_blobs) {
    cv::Mat color_channel;
    cv::Mat src_bin_light, src_bin_dim;
    std::vector<cv::Mat> channels;
    cv::split(src, channels);
    // Pick the channel matching the enemy color (BGR: 0 = blue, 2 = red).
    if (enemy_color == ENEMY_BLUE) {
        color_channel = channels[0];
    } else if (enemy_color == ENEMY_RED) {
        color_channel = channels[2];
    }
    // Bright-pass threshold is color-dependent (blue bars saturate harder).
    int light_threshold;
    if(enemy_color == ENEMY_BLUE){
        light_threshold = 225;
    }else{
        light_threshold = 200;
    }
    cv::threshold(color_channel, src_bin_light, light_threshold, 255, cv::THRESH_BINARY); // bright pass
    if (src_bin_light.empty()) return false;
    imagePreProcess(src_bin_light); // morphological cleanup
    cv::threshold(color_channel, src_bin_dim, 140, 255, cv::THRESH_BINARY); // dim pass
    if (src_bin_dim.empty()) return false;
    imagePreProcess(src_bin_dim); // morphological cleanup
    if (src_bin_light.size() == cv::Size(640, 480) && show_light_blobs) {
        imshow("bin_light", src_bin_light);
        imshow("bin_dim", src_bin_dim);
    }
    // Contours from both binarizations; only contours without a child
    // (hierarchy[i][2] == -1) are considered bar candidates.
    std::vector<std::vector<cv::Point>> light_contours_light, light_contours_dim;
    LightBlobs light_blobs_light, light_blobs_dim;
    std::vector<cv::Vec4i> hierarchy_light, hierarchy_dim;
    cv::findContours(src_bin_light, light_contours_light, hierarchy_light, cv::RETR_CCOMP, cv::CHAIN_APPROX_NONE);
    cv::findContours(src_bin_dim, light_contours_dim, hierarchy_dim, cv::RETR_CCOMP, cv::CHAIN_APPROX_NONE);
    for (int i = 0; i < light_contours_light.size(); i++) {
        if (hierarchy_light[i][2] == -1) {
            cv::RotatedRect rect = cv::minAreaRect(light_contours_light[i]);
            if (isValidLightBlob(light_contours_light[i], rect)) {
                light_blobs_light.emplace_back(
                        rect, areaRatio(light_contours_light[i], rect), get_blob_color(src, rect)
                );
            }
        }
    }
    for (int i = 0; i < light_contours_dim.size(); i++) {
        if (hierarchy_dim[i][2] == -1) {
            cv::RotatedRect rect = cv::minAreaRect(light_contours_dim[i]);
            if (isValidLightBlob(light_contours_dim[i], rect)) {
                light_blobs_dim.emplace_back(
                        rect, areaRatio(light_contours_dim[i], rect), get_blob_color(src, rect)
                );
            }
        }
    }
    // Deduplicate: when the same bar is found in both passes, keep the
    // detection with the larger fill ratio and mark the other for removal.
    // NOTE(review): if one blob matches several in the other pass, its index
    // is pushed multiple times and erased repeatedly below, removing extra
    // elements -- confirm whether indices should be de-duplicated first.
    vector<int> light_to_remove, dim_to_remove;
    for (int l = 0; l != light_blobs_light.size(); l++) {
        for (int d = 0; d != light_blobs_dim.size(); d++) {
            if (isSameBlob(light_blobs_light[l], light_blobs_dim[d])) {
                if (light_blobs_light[l].area_ratio > light_blobs_dim[d].area_ratio) {
                    dim_to_remove.emplace_back(d);
                } else {
                    light_to_remove.emplace_back(l);
                }
            }
        }
    }
    // Erase from highest index down so earlier indices stay valid.
    sort(light_to_remove.begin(), light_to_remove.end(), [](int a, int b) { return a > b; });
    sort(dim_to_remove.begin(), dim_to_remove.end(), [](int a, int b) { return a > b; });
    for (auto x : light_to_remove) {
        light_blobs_light.erase(light_blobs_light.begin() + x);
    }
    for (auto x : dim_to_remove) {
        light_blobs_dim.erase(light_blobs_dim.begin() + x);
    }
    // Merge survivors from both passes into the output list.
    for (const auto &light : light_blobs_light) {
        light_blobs.emplace_back(light);
    }
    for (const auto &dim : light_blobs_dim) {
        light_blobs.emplace_back(dim);
    }
    return light_blobs.size() >= 2;
}

View File

@@ -0,0 +1,38 @@
//
// Created by xinyang on 19-3-27.
//
#include <armor_finder/armor_finder.h>
#include <show_images/show_images.h>
#include <options.h>
#include <log.h>
// Searching-state handler: look for a target in the full frame.
// Anti-switch logic: if the new detection jumps more than 2x the previous
// box height away from the last valid target, suppress it for up to 3
// frames, giving the detector a chance to re-find the same target before
// accepting a switch. This damps rapid flicker between multiple targets.
bool ArmorFinder::stateSearchingTarget(cv::Mat &src) {
    if (findArmorBox(src, target_box)) { // search the frame; true = found
        if (last_box.rect != cv::Rect2d() &&
            (getPointLength(last_box.getCenter() - target_box.getCenter()) > last_box.rect.height * 2.0) &&
            anti_switch_cnt++ < 3) { // looks like a different target than last frame's
            target_box = ArmorBox(); // suppress: report nothing for up to 3 frames
            LOGM("anti-switch!");    // (no position is sent right after a switch)
            return false;
        } else {
            anti_switch_cnt = 0;
            return true;
        }
    } else {
        target_box = ArmorBox();
        anti_switch_cnt++;
        return false;
    }
}
/*
bool ArmorFinder::stateSearchingTarget(cv::Mat &src) {
if (findArmorBox(src, target_box)) {
return true;
} else {
target_box = ArmorBox();
return false;
}
}
*/

View File

@@ -0,0 +1,88 @@
//
// Created by xinyang on 19-7-11.
//
#include <armor_finder/armor_finder.h>
#include <config/setconfig.h>
#include <show_images/ballistic_predicition.h>
#include <show_images/auto_trigger.h>
#include <log.h>
// Serialize a target into the 10-byte frame understood by the MCU:
//   's' | x int16 BE | y int16 BE | z int16 BE | shoot_delay uint16 BE | 'e'
// x and y are scaled by 32767/100, z by 32767/1000 before truncation.
// Returns the serial write result.
static bool sendTarget(Serial &serial, double x, double y, double z, uint16_t shoot_delay) {
    static short x_tmp, y_tmp, z_tmp;
    uint8_t buff[10];
#ifdef WITH_COUNT_FPS
    // Once-per-second FPS + target printout for debugging builds.
    static time_t last_time = time(nullptr);
    static int fps;
    time_t t = time(nullptr);
    if (last_time != t) {
        last_time = t;
        cout << "Armor: fps:" << fps << ", (" << x << "," << y << "," << z << ")" << endl;
        fps = 0;
    }
    fps += 1;
#endif
// NOTE(review): this clamp macro is defined but never applied, so the casts
// below can overflow for out-of-range inputs -- confirm whether MINMAX was
// meant to wrap the three conversions.
#define MINMAX(value, min, max) value = ((value) < (min)) ? (min) : ((value) > (max) ? (max) : (value))
    x_tmp = static_cast<short>(x * (32768 - 1) / 100);
    y_tmp = static_cast<short>(y * (32768 - 1) / 100);
    z_tmp = static_cast<short>(z * (32768 - 1) / 1000);
    buff[0] = 's';
    buff[1] = static_cast<char>((x_tmp >> 8) & 0xFF);
    buff[2] = static_cast<char>((x_tmp >> 0) & 0xFF);
    buff[3] = static_cast<char>((y_tmp >> 8) & 0xFF);
    buff[4] = static_cast<char>((y_tmp >> 0) & 0xFF);
    buff[5] = static_cast<char>((z_tmp >> 8) & 0xFF);
    buff[6] = static_cast<char>((z_tmp >> 0) & 0xFF);
    buff[7] = static_cast<char>((shoot_delay >> 8) & 0xFF);
    buff[8] = static_cast<char>((shoot_delay >> 0) & 0xFF);
    buff[9] = 'e';
    // if(buff[7]<<8 | buff[8])
    //     cout << (buff[7]<<8 | buff[8]) << endl;
    return serial.WriteData(buff, sizeof(buff));
}
// Convert the current target box into a gimbal command (PID-smoothed yaw,
// ballistics-compensated pitch, distance) and send it over serial.
// Returns false when there is no valid target, otherwise the serial result.
// Change vs. original: removed yaw_I_component / pitch_I_component, which
// were computed every frame but never read (the I terms are applied inline
// in the dx/dy update below).
bool ArmorFinder::sendBoxPosition(uint16_t shoot_delay) {
    if (target_box.rect == cv::Rect2d()) return false;
    if (shoot_delay) {
        LOGM(STR_CTR(WORD_BLUE, "next box %dms"), shoot_delay);
    }
    auto rect = target_box.rect;
    // Pixel offset of the box center from the optical center.
    double dx = rect.x + rect.width / 2 - IMAGE_CENTER_X;
    double dy = rect.y + rect.height / 2 - IMAGE_CENTER_Y;
    // PID on the pixel error; the I terms accumulate across frames.
    sum_yaw += dx;
    sum_pitch += dy;
    double tmp_yaw = dx;
    double tmp_pitch = dy;
    dx = YAW_AIM_KP * dx + YAW_AIM_KI * sum_yaw +
         YAW_AIM_KD * (dx - last_yaw);
    dy = PITCH_AIM_KP * dy + PITCH_AIM_KI * sum_pitch +
         PITCH_AIM_KD * (dy - last_pitch);
    last_yaw = tmp_yaw;
    last_pitch = tmp_pitch;
    // Yaw from the pinhole model on the PID-corrected pixel offset.
    double yaw = atan(dx / FOCUS_PIXAL) * 180 / PI;
    double dist = sqrt(target_xyz.x * target_xyz.x + target_xyz.y * target_xyz.y + target_xyz.z * target_xyz.z) / 100.0; // meters
    // Ballistic compensation uses the PnP 3D coordinate (cm -> m).
    double x_target = sqrt(target_xyz.x * target_xyz.x + target_xyz.z * target_xyz.z) / 100.0; // horizontal distance
    double y_target = -target_xyz.y / 100.0; // height difference
    double pitch_comp = BallisticSolver::get_pitch(x_target, y_target, MUZZLE_VELOCITY, BALLISTIC_K);
    // Fire permission: residual must be under 1.5 degrees.
    can_fire = AutoTrigger::should_fire(yaw, pitch_comp, 1.5);
    return sendTarget(serial, yaw, pitch_comp, dist * 100.0, shoot_delay);
}

View File

@@ -0,0 +1,11 @@
//
// Created by xinyang on 19-3-27.
//
#include <armor_finder/armor_finder.h>
// Stand-by state handler: immediately transition back to searching.
// Always reports success.
bool ArmorFinder::stateStandBy() {
    state = SEARCHING_STATE;
    return true;
}

View File

@@ -0,0 +1,71 @@
//
// Created by xinyang on 19-3-27.
//
#define LOG_LEVEL LOG_NONE
#include <log.h>
#include <options.h>
#include <armor_finder/armor_finder.h>
#include <show_images/show_images.h>
// Tracking-state handler: advance the KCF tracker one frame, then try to
// re-detect the armor inside a 2x-enlarged ROI around the tracked position
// (refreshing the tracker when successful). When re-detection fails, decide
// whether the target was lost using the classifier, or -- without a
// classifier -- by comparing the bright-pixel area against the previous
// frame.
// Bug fix vs. original: the area check assigned `contour_area` BEFORE the
// comparison, so the difference was always zero and the lost-target branch
// was dead code. The new count is now compared against the remembered value
// first, then stored.
bool ArmorFinder::stateTrackingTarget(cv::Mat &src) {
    cv::Rect pos(target_box.rect);
    if(!tracker->update(src, pos)){ // KCF tracking step
        target_box = ArmorBox();
        LOGW("Track fail!");
        return false;
    }
    if((cv::Rect2d(pos) & cv::Rect2d(0, 0, 640, 480)) != cv::Rect2d(pos)){
        target_box = ArmorBox();
        LOGW("Track out range!");
        return false;
    }
    // Re-search in a region twice the tracked box's size to recover the
    // light-bar information.
    cv::Rect2d bigger_rect;
    bigger_rect.x = pos.x - pos.width / 2.0;
    bigger_rect.y = pos.y - pos.height / 2.0;
    bigger_rect.height = pos.height * 2;
    bigger_rect.width = pos.width * 2;
    bigger_rect &= cv::Rect2d(0, 0, 640, 480);
    cv::Mat roi = src(bigger_rect).clone();
    ArmorBox box;
    if(findArmorBox(roi, box)) { // re-detected: refresh the tracker
        target_box = box;
        target_box.rect.x += bigger_rect.x; // add ROI offset back
        target_box.rect.y += bigger_rect.y;
        for(auto &blob : target_box.light_blobs){
            blob.rect.center.x += bigger_rect.x;
            blob.rect.center.y += bigger_rect.y;
        }
        for(auto &pt : target_box.points){
            pt.x += bigger_rect.x;
            pt.y += bigger_rect.y;
        }
        tracker = TrackerToUse::create();
        tracker->init(src, target_box.rect);
    }else{ // not re-detected: judge whether the target is lost
        roi = src(pos).clone();
        if(classifier){ // classifier available: let it decide
            cv::resize(roi, roi, cv::Size(48, 36));
            if(classifier(roi) == 0){
                target_box = ArmorBox();
                LOGW("Track classify fail range!");
                return false;
            }
        }else{ // fallback: bright-area consistency with the previous frame
            cv::Mat roi_gray;
            cv::cvtColor(roi, roi_gray, cv::COLOR_RGB2GRAY);
            cv::threshold(roi_gray, roi_gray, 180, 255, cv::THRESH_BINARY);
            int current_area = cv::countNonZero(roi_gray);
            // Compare against the previous frame's area BEFORE updating it;
            // a >30% jump means the patch no longer looks like the target.
            if(abs(current_area - contour_area) > contour_area * 0.3){
                contour_area = current_area;
                target_box = ArmorBox();
                return false;
            }
            contour_area = current_area;
        }
        target_box.rect = pos;
        target_box.light_blobs.clear();
    }
    return true;
}

View File

@@ -0,0 +1,102 @@
#include <show_images/ballistic_predicition.h>
#include <cmath>
/**
 * @brief Iteratively solve for the compensated pitch angle (degrees).
 *
 * Given horizontal distance d (m) and target height difference y (m), finds
 * the launch pitch such that the trajectory under linear air drag
 * (coefficient k, muzzle speed v0 m/s) passes through (d, y).
 * Fixed-point iteration, at most 20 steps; stops early once the height
 * residual is under 1 mm. If d exceeds the model's maximum range for the
 * current pitch (val <= 0), the current guess is returned as-is.
 */
double BallisticSolver::get_pitch(double d, double y, double v0, double k) {
    const double g = 9.80665;
    double pitch_new = atan2(y, d); // initial guess: geometric elevation
    for (int i = 0; i < 20; ++i) {
        double cos_p = cos(pitch_new);
        double val = 1.0 - (k * d) / (v0 * cos_p);
        if (val <= 0) break; // beyond maximum range at this pitch
        double t = -log(val) / k; // flight time to horizontal distance d
        // Height reached at time t under linear drag.
        double y_calc = (1.0/k) * (v0*sin(pitch_new) + g/k) * (1.0 - exp(-k*t)) - (g/k)*t;
        double dy = y - y_calc;
        pitch_new += atan2(dy, d); // nudge pitch by the residual's elevation
        if (std::abs(dy) < 0.001) break;
    }
    return pitch_new * 180.0 / M_PI;
}
/**
 * @brief Projectile flight time under the linear air-drag model.
 *
 * Horizontal motion: v_x(t) = v0*cos(pitch)*exp(-k*t)
 * Displacement:      x(t)   = (v0*cos(pitch)/k) * (1 - exp(-k*t))
 * Solving x(t) = d for t gives the closed form below.
 *
 * @param d      target horizontal distance (m)
 * @param pitch  launch pitch angle (degrees, up is positive)
 * @param v0     muzzle speed (m/s)
 * @param k      air drag coefficient
 * @return flight time in seconds; 0.5 s fallback when d exceeds the
 *         model's maximum range at this pitch.
 */
double BallisticSolver::get_flight_time(double d, double pitch, double v0, double k) {
    double cos_pitch = cos(pitch * M_PI / 180.0);
    double val = 1.0 - (k * d) / (v0 * cos_pitch);
    if (val <= 0) return 0.5; // out of range: fall back to a fixed time
    return -log(val) / k;
}
/**
 * @brief Projectile height (m) when it reaches horizontal distance d,
 * for launch pitch `pitch` (degrees), muzzle speed v0 and drag coefficient k.
 */
double BallisticSolver::get_impact_y(double d, double pitch, double v0, double k) {
    const double g = 9.80665;
    const double pitch_rad = pitch * M_PI / 180.0;
    const double t = get_flight_time(d, pitch, v0, k);
    const double decay = 1.0 - exp(-k * t);
    return (1.0 / k) * (v0 * sin(pitch_rad) + g / k) * decay - (g / k) * t;
}
// ──────────────────── [TEST] 以下两个函数用于可视化验证弹道算法正确性 ────────────────────
// 如果测试通过,直接注释掉 armor_finder.cpp 里的调用,不影响其他逻辑。
/**
 * [TEST] Map the predicted impact point from the world frame back into the
 * camera frame, for visual validation of the ballistic model.
 * Transform chain: xyz_camera = R_c2g^T * (R_g2w^T * xyz_world - t_c2g)
 * NOTE(review): the impact height replaces the target's world z directly --
 * this assumes world z is measured from the muzzle height, same as the
 * ballistic model's y; confirm against the world-frame convention.
 */
ImpactPoint BallisticSolver::get_impact_in_camera(
    const Eigen::Vector3d& xyz_in_world,
    double pitch_imu,
    double v0,
    const Eigen::Matrix3d& R_camera2gimbal,
    const Eigen::Matrix3d& R_gimbal2world,
    const Eigen::Vector3d& t_camera2gimbal,
    double k)
{
    // Horizontal distance; world frame: x = forward, y = left.
    double d = std::sqrt(xyz_in_world.x() * xyz_in_world.x() +
                         xyz_in_world.y() * xyz_in_world.y());
    double impact_z = get_impact_y(d, pitch_imu, v0, k);
    double fly_time = get_flight_time(d, pitch_imu, v0, k);
    // Impact point: same horizontal (x, y) as the target, z from ballistics.
    Eigen::Vector3d impact_world(xyz_in_world.x(), xyz_in_world.y(), impact_z);
    // world -> gimbal -> camera
    Eigen::Vector3d impact_gimbal = R_gimbal2world.transpose() * impact_world;
    Eigen::Vector3d impact_camera = R_camera2gimbal.transpose() * (impact_gimbal - t_camera2gimbal);
    return ImpactPoint{ impact_camera, fly_time };
}
/**
 * [TEST] Pinhole projection of a camera-frame 3D point to pixel coordinates
 * (no distortion; visualization only). Returns (-1, -1) for points at or
 * behind the image plane.
 */
cv::Point2f BallisticSolver::project_to_pixel(
    const Eigen::Vector3d& xyz_in_camera,
    const cv::Mat& camera_matrix)
{
    const double z = xyz_in_camera.z();
    if (z <= 0) return cv::Point2f(-1, -1); // behind the camera: invalid
    const double fx = camera_matrix.at<double>(0, 0);
    const double fy = camera_matrix.at<double>(1, 1);
    const double cx = camera_matrix.at<double>(0, 2);
    const double cy = camera_matrix.at<double>(1, 2);
    return cv::Point2f(static_cast<float>(fx * xyz_in_camera.x() / z + cx),
                       static_cast<float>(fy * xyz_in_camera.y() / z + cy));
}
// ──────────────────────────────── [TEST END] ─────────────────────────────────────────────

View File

@@ -0,0 +1,182 @@
#include <show_images/show_images.h>
#include <opencv2/highgui.hpp>
#include <options.h>
#include <log.h>
using namespace cv;
// Draw each light bar as a quadrilateral outline: red bars in red, blue
// bars in blue, anything else in green.
void drawLightBlobs(cv::Mat &src, const LightBlobs &blobs){
    for (const auto &blob : blobs) {
        Scalar color;
        if (blob.blob_color == BLOB_RED) {
            color = Scalar(0, 0, 255);
        } else if (blob.blob_color == BLOB_BLUE) {
            color = Scalar(255, 0, 0);
        } else {
            color = Scalar(0, 255, 0);
        }
        cv::Point2f corners[4];
        blob.rect.points(corners);
        for (int k = 0; k < 4; k++) {
            cv::line(src, corners[k], corners[(k + 1) % 4], color, 2);
        }
    }
}
/**************************
 * Show all armor candidate boxes *
 **************************/
void showArmorBoxes(std::string windows_name, const cv::Mat &src, const ArmorBoxes &armor_boxes) {
    static Mat image2show;
    if (src.type() == CV_8UC1) { // grayscale input
        cvtColor(src, image2show, COLOR_GRAY2RGB);
    } else if (src.type() == CV_8UC3) { // color input
        image2show = src.clone();
    }
    for (const auto &box : armor_boxes) {
        // Both enemy colors render identically (green rectangle + light
        // bars); boxes with any other color value are skipped, matching the
        // original's two identical branches.
        if (box.box_color == BOX_BLUE || box.box_color == BOX_RED) {
            rectangle(image2show, box.rect, Scalar(0, 255, 0), 1);
            drawLightBlobs(image2show, box.light_blobs);
        }
    }
    imshow(windows_name, image2show);
}
/**************************
 * Display multiple armor-plate regions with their class labels *
 **************************/
/**
 * Draw every classified armor box with its id label.
 * Label color encodes the id range: green for -1 (unclassified-but-tracked),
 * blue for blue-team ids [1,8), red for red-team ids [8,15). Boxes with id 0
 * are skipped; any other id is reported as an error.
 *
 * @param window_names  OpenCV window title.
 * @param src           input frame, CV_8UC1 or CV_8UC3.
 * @param boxes         classified armor boxes.
 */
void showArmorBoxesClass(std::string window_names, const cv::Mat &src, const ArmorBoxes &boxes) {
    static Mat image2show;  // reused across calls to avoid per-frame allocation
    if (src.type() == CV_8UC1) {            // grayscale input
        cvtColor(src, image2show, COLOR_GRAY2RGB);
    } else if (src.type() == CV_8UC3) {     // BGR color input
        image2show = src.clone();
    } else {
        return;  // unsupported format: don't show a stale static buffer
    }
    for (const auto &box : boxes) {
        if (box.id == 0) continue;          // id 0 means "nothing to display"
        rectangle(image2show, box.rect, Scalar(0, 255, 0), 1);
        drawLightBlobs(image2show, box.light_blobs);
        const Point label_pos(box.rect.x + 2, box.rect.y + 2);
        if (box.id == -1)
            putText(image2show, id2name[box.id], label_pos, cv::FONT_HERSHEY_TRIPLEX, 1,
                    Scalar(0, 255, 0));
        else if (1 <= box.id && box.id < 8)
            putText(image2show, id2name[box.id], label_pos, cv::FONT_HERSHEY_TRIPLEX, 1,
                    Scalar(255, 0, 0));
        else if (8 <= box.id && box.id < 15)
            putText(image2show, id2name[box.id], label_pos, cv::FONT_HERSHEY_TRIPLEX, 1,
                    Scalar(0, 0, 255));
        else
            // Was `else if (box.id != 0)`, which is always true here — id 0 is
            // filtered above. Any remaining id is out of the known range.
            LOGE_INFO("Invalid box id:%d!", box.id);
    }
    imshow(window_names, image2show);
}
/**************************
 * Display a single armor-plate region with its class label *
 **************************/
/**
 * Show one tracked armor box: its light blobs, bounding rectangle (thick when
 * facing front), and an "id distance" text label colored by id range.
 *
 * @param windows_name  OpenCV window title.
 * @param src           input frame, CV_8UC1 or CV_8UC3.
 * @param box           the armor box to draw; a default-constructed rect means
 *                      "no target", in which case the raw frame is shown.
 */
void showArmorBox(std::string windows_name, const cv::Mat &src, const ArmorBox &box) {
    static Mat image2show;  // reused across calls to avoid per-frame allocation
    if (box.rect == cv::Rect2d()) {
        // No valid target: show the raw frame and stop. The original fell
        // through here and immediately overwrote the window with annotations
        // drawn on a possibly stale buffer.
        imshow(windows_name, src);
        return;
    }
    if (src.type() == CV_8UC1) {            // grayscale input
        cvtColor(src, image2show, COLOR_GRAY2RGB);
    } else if (src.type() == CV_8UC3) {     // BGR color input
        image2show = src.clone();
    } else {
        return;  // unsupported format: don't draw on a stale static buffer
    }
    drawLightBlobs(image2show, box.light_blobs);
//    static FILE *fp = fopen(PROJECT_DIR"/ratio.txt", "w");
//    if(box.light_blobs.size() == 2)
//        fprintf(fp, "%lf %lf %lf\n", box.light_blobs[0].length, box.light_blobs[1].length, box.getBlobsDistance())
//    cout << box.lengthDistanceRatio() << endl;
    // Thick outline when the plate faces the camera head-on.
    if (box.getOrientation() == ArmorBox::FRONT) {
        rectangle(image2show, box.rect, Scalar(0, 255, 0), 3);
    } else {
        rectangle(image2show, box.rect, Scalar(0, 255, 0), 1);
    }
    // snprintf replaces sprintf into a fixed char[10]: a large distance value
    // could overflow the original buffer.
    char dist[16];
    snprintf(dist, sizeof(dist), "%.1f", box.getBoxDistance());
    const Point label_pos(box.rect.x + 2, box.rect.y + 2);
    if (box.id == -1)
        putText(image2show, id2name[box.id] + " " + dist, label_pos, cv::FONT_HERSHEY_TRIPLEX, 1,
                Scalar(0, 255, 0));
    else if (1 <= box.id && box.id < 8)
        putText(image2show, id2name[box.id] + " " + dist, label_pos, cv::FONT_HERSHEY_TRIPLEX, 1,
                Scalar(255, 0, 0));
    else if (8 <= box.id && box.id < 15)
        putText(image2show, id2name[box.id] + " " + dist, label_pos, cv::FONT_HERSHEY_TRIPLEX, 1,
                Scalar(0, 0, 255));
    else if (box.id != 0)
        LOGE_INFO("Invalid box id:%d!", box.id);
    imshow(windows_name, image2show);
}
/**************************
 * Display multiple light-blob regions *
 **************************/
/**
 * Render every detected light blob onto a copy of `src` and show it.
 *
 * @param windows_name  OpenCV window title.
 * @param src           input frame, CV_8UC1 or CV_8UC3.
 * @param light_blobs   blobs to draw.
 */
void showLightBlobs(std::string windows_name, const cv::Mat &src, const LightBlobs &light_blobs) {
    static Mat image2show;  // reused across calls to avoid per-frame allocation
    if (src.type() == CV_8UC1) {            // grayscale input
        cvtColor(src, image2show, COLOR_GRAY2RGB);
    } else if (src.type() == CV_8UC3) {     // BGR color input
        image2show = src.clone();
    }
    // The loop body here was a byte-for-byte duplicate of drawLightBlobs()
    // defined above in this file; delegate instead of maintaining two copies.
    drawLightBlobs(image2show, light_blobs);
    imshow(windows_name, image2show);
}
/**
 * Visualize the tracker's current search region: draw `pos` as a thin green
 * rectangle on a copy of `src` and show it in the named window.
 */
void showTrackSearchingPos(std::string window_names, const cv::Mat &src, const cv::Rect2d pos){
    static Mat image2show;  // reused across calls to avoid per-frame allocation
    switch (src.type()) {
        case CV_8UC1:                        // grayscale input
            cvtColor(src, image2show, COLOR_GRAY2RGB);
            break;
        case CV_8UC3:                        // BGR color input
            image2show = src.clone();
            break;
        default:
            // Other formats: keep whatever the buffer held (original behavior).
            break;
    }
    rectangle(image2show, pos, Scalar(0, 255, 0), 1);
    imshow(window_names, image2show);
}
// ─────────────── [TEST] Impact-point visualization ─────────────────────────────────────────
// Marks the predicted ballistic impact point with a small red dot, used to verify
// the ballistic solver. Once validated, simply comment out the call site; this
// function never affects any serial/output logic.
void showImpactPoint(const std::string& window_name, const cv::Mat& src, cv::Point2f impact_pixel) {
    static Mat image2show;  // reused across calls to avoid per-frame allocation
    if (src.type() == CV_8UC1) {            // grayscale input
        cvtColor(src, image2show, COLOR_GRAY2RGB);
    } else if (src.type() == CV_8UC3) {     // BGR color input
        image2show = src.clone();
    } else {
        return;                              // unsupported format
    }
    const int px = static_cast<int>(impact_pixel.x);
    const int py = static_cast<int>(impact_pixel.y);
    const bool on_image =
        px >= 0 && px < image2show.cols && py >= 0 && py < image2show.rows;
    if (on_image) {
        // Filled red circle, 3 px radius.
        cv::circle(image2show, cv::Point(px, py), 3, Scalar(0, 0, 255), -1);
    }
    imshow(window_name, image2show);
}
// ─────────────── [TEST END] ────────────────────────────────────────────────────────────────