6 Commits

Author SHA1 Message Date
lyf
8f5ced6be3 1 2025-12-05 13:48:14 +08:00
601e248cd6 删了垃圾文件 2025-12-05 13:43:53 +08:00
93935889e0 修改了一点参数 2025-12-05 13:27:42 +08:00
b4c795f77d 参考上海交大 2025-12-05 11:24:23 +08:00
3f60b1f564 参考上海交通大学 2025-12-05 11:20:41 +08:00
f6e7d37da9 参考上海交通大学代码进行修改 2025-12-05 11:13:37 +08:00
3 changed files with 127 additions and 36 deletions

View File

@@ -23,6 +23,23 @@ struct ArmorPlate {
double confidence; double confidence;
std::pair<LightBar, LightBar> pair; std::pair<LightBar, LightBar> pair;
std::vector<cv::Point2f> corners_2d; // Can be computed later for 3D pose std::vector<cv::Point2f> corners_2d; // Can be computed later for 3D pose
// Axis-aligned bounding box of the armor plate in image coordinates.
// With at least four stored corners the box tightly encloses all of them;
// otherwise a fixed 60x30 box centred on the plate centre is returned
// (used before corners_2d has been populated).
cv::Rect2d getBoundingRect() const {
    if (corners_2d.size() < 4) {
        // Fallback: fixed-size box around the detected centre.
        return cv::Rect2d(center.x - 30, center.y - 15, 60, 30);
    }
    float left = corners_2d[0].x;
    float top = corners_2d[0].y;
    float right = left;
    float bottom = top;
    for (const auto& corner : corners_2d) {
        left = std::min(left, corner.x);
        top = std::min(top, corner.y);
        right = std::max(right, corner.x);
        bottom = std::max(bottom, corner.y);
    }
    return cv::Rect2d(left, top, right - left, bottom - top);
}
}; };
class ArmorDetector { class ArmorDetector {

View File

@@ -36,6 +36,9 @@ const float ARMOR_ANGLE_DIFF_THRESHOLD = 15.0f * CV_PI / 180.0f; // Armor light
const float KF_PROCESS_NOISE = 0.02f; // Process noise covariance const float KF_PROCESS_NOISE = 0.02f; // Process noise covariance
const float KF_MEASUREMENT_NOISE = 0.5f; // Measurement noise covariance const float KF_MEASUREMENT_NOISE = 0.5f; // Measurement noise covariance
// Focal length in pixels (from camera calibration)
// NOTE(review): identifier misspells "PIXEL"; kept as-is since other
// translation units may already reference FOCAL_PIXAL — rename repo-wide if desired.
const double FOCAL_PIXAL = 600.0; // This should match your actual camera calibration
// TTL communication settings // TTL communication settings
const int TTL_BAUDRATE = 115200; const int TTL_BAUDRATE = 115200;

View File

@@ -6,8 +6,9 @@
#include <thread> #include <thread>
#include <memory> #include <memory>
#include <unistd.h> #include <unistd.h>
#include <math.h>
#include <opencv2/opencv.hpp> #include <opencv2/opencv.hpp>
#include <opencv2/tracking.hpp>
#include "config.h" #include "config.h"
#include "MindVisionCamera.h" // 使用MindVision相机 #include "MindVisionCamera.h" // 使用MindVision相机
@@ -21,30 +22,34 @@
// Function to output control data to TTL device (with enable control) // Function to output control data to TTL device (with enable control)
void output_control_data(const cv::Point2f* ballistic_point, void output_control_data(const cv::Point2f* ballistic_point,
const std::string& target_color, const std::string& target_color,
int frame_counter,
TTLCommunicator* ttl_communicator, TTLCommunicator* ttl_communicator,
const cv::Point2f& img_center, const cv::Point2f& img_center,
bool use_ttl) { bool use_ttl) {
// Only send data when TTL is enabled, meets frame interval, and has valid target // Only send data when TTL is enabled and has valid target
if (use_ttl && frame_counter % 5 == 0 && ballistic_point != nullptr) { if (use_ttl && ballistic_point != nullptr) {
std::ostringstream send_str; std::ostringstream send_str;
// Calculate offset (based on actual image center) // Calculate offset (based on actual image center)
int ballistic_offset_x = static_cast<int>(ballistic_point->x - img_center.x); int ballistic_offset_yaw = 1.9*-static_cast<int>(ballistic_point->x - img_center.x);
int ballistic_offset_y = static_cast<int>(img_center.y - ballistic_point->y); if ( abs(ballistic_offset_yaw) > 320){
ballistic_offset_yaw = ( ballistic_offset_yaw / abs( ballistic_offset_yaw ) ) * 220 ;
}
int ballistic_offset_pitch = 1.9*-static_cast<int>(img_center.y - ballistic_point->y);
if ( abs(ballistic_offset_pitch) > 180 ) {
ballistic_offset_pitch = ( ballistic_offset_pitch / abs( ballistic_offset_pitch ) ) * 180*1.9 ;
}
// Color simplification mapping // Color simplification mapping
std::string simplified_color = target_color; std::string simplified_color = target_color;
if (target_color == "red") simplified_color = "r"; if (target_color == "red") simplified_color = "r";
else if (target_color == "blue") simplified_color = "b"; else if (target_color == "blue") simplified_color = "b";
// Construct send string // Construct send string (original format)
send_str << "s " << simplified_color << " " << std::to_string(ballistic_offset_x) << " " << std::to_string(ballistic_offset_y) << "\n"; send_str << "#s " << simplified_color << " " << std::to_string(ballistic_offset_yaw) << " " << std::to_string(ballistic_offset_pitch) << "*\n";
// Send data // Send data
if (ttl_communicator != nullptr) { if (ttl_communicator != nullptr) {
ttl_communicator->send_data(send_str.str()); ttl_communicator->send_data(send_str.str());
}else{ }else{
std::cerr << "Error: TTLCommunicator is a null pointer!" << std::endl; std::cerr << "Error: TTLCommunicator is a null pointer!" << std::endl;
} }
@@ -66,25 +71,16 @@ int main(int /*argc*/, char* /*argv*/[]) {
static int Numbe = 0; static int Numbe = 0;
std::string target_color = "red"; std::string target_color = "red";
int cam_id = 0; int cam_id = 0;
cv::Size default_resolution(1280, 720); cv::Size default_resolution(640, 480); // Changed to 640x480 for consistency with SJTU project
bool use_ttl = false; // Set to false to disable TTL communication bool use_ttl = false; // Set to false to disable TTL communication
if (Numbe == 0) { if (Numbe == 0) {
// 执行 shell 命令(注意安全风险!) // 执行 shell 命令(注意安全风险!)
int result = std::system("soude chmod 777 /dev/tty*"); std::system("sudo chmod 777 /dev/tty*");
// 可选:检查命令是否成功执行
if (result == -1) {
std::cerr << "Failed to execute system command.\n";
} else {
std::cout << "Permissions updated (if any tty devices exist).\n";
}
Numbe++; Numbe++;
} }
return 0;
// Define optional resolution list (adjust based on camera support) // Define optional resolution list (adjust based on camera support)
std::vector<cv::Size> resolutions = { std::vector<cv::Size> resolutions = {
cv::Size(320, 240), // Low resolution, high frame rate cv::Size(320, 240), // Low resolution, high frame rate
@@ -139,6 +135,13 @@ int main(int /*argc*/, char* /*argv*/[]) {
KalmanFilter kalman_tracker; KalmanFilter kalman_tracker;
Visualizer visualizer; Visualizer visualizer;
// Initialize KCF tracker
cv::Ptr<cv::Tracker> tracker = cv::TrackerKCF::create();
bool is_tracking = false;
cv::Rect2d tracking_roi;
int tracking_frame_count = 0;
const int MAX_TRACKING_FRAMES = 100; // Maximum frames to track before returning to search
int frame_counter = 0; // Counter to control output frequency int frame_counter = 0; // Counter to control output frequency
int max_consecutive_predicts = 20; // Maximum consecutive prediction times int max_consecutive_predicts = 20; // Maximum consecutive prediction times
int consecutive_predicts = 0; // Current consecutive prediction count int consecutive_predicts = 0; // Current consecutive prediction count
@@ -165,30 +168,90 @@ int main(int /*argc*/, char* /*argv*/[]) {
cv::Mat color_only_frame; cv::Mat color_only_frame;
frame.copyTo(color_only_frame, raw_mask); frame.copyTo(color_only_frame, raw_mask);
// Armor plate detection // Initialize tracking center
cv::Point2f* tracking_center = nullptr;
std::unique_ptr<cv::Point2f> tracking_point = nullptr;
if (is_tracking) {
// Update tracker
bool success = tracker->update(frame, tracking_roi);
if (success && tracking_roi.area() > 0) {
// Calculate center of tracked ROI
tracking_point = std::make_unique<cv::Point2f>(
tracking_roi.x + tracking_roi.width / 2.0f,
tracking_roi.y + tracking_roi.height / 2.0f
);
tracking_center = tracking_point.get();
tracking_frame_count++;
// If tracking for too long or detection is available, search for armor again
if (tracking_frame_count > MAX_TRACKING_FRAMES) {
is_tracking = false;
tracking_frame_count = 0;
}
} else {
// Tracking failed, return to detection mode
is_tracking = false;
tracking_frame_count = 0;
}
}
// Armor plate detection - only when not tracking or need to verify tracking
std::vector<LightBar> valid_light_bars; std::vector<LightBar> valid_light_bars;
std::vector<ArmorPlate> armor_plates; std::vector<ArmorPlate> armor_plates;
if (!is_tracking || tracking_frame_count % 10 == 0) { // Detect every 10 frames when tracking
detector.detect(mask, target_color, valid_light_bars, armor_plates); detector.detect(mask, target_color, valid_light_bars, armor_plates);
}
// Kalman filter tracking (modified part: get target speed) // Kalman filter tracking (modified part: get target speed)
cv::Point2f* current_center = nullptr; cv::Point2f* detection_center = nullptr;
if (!armor_plates.empty()) { if (!armor_plates.empty()) {
current_center = &(armor_plates[0].center); detection_center = &(armor_plates[0].center);
} }
// Use smart pointer for safer memory management // Use smart pointer for safer memory management
std::unique_ptr<cv::Point2f> predicted_center = nullptr; std::unique_ptr<cv::Point2f> predicted_center = nullptr;
if (current_center != nullptr) { // Priority: detection -> tracking -> Kalman prediction
if (detection_center != nullptr) {
// Has detection result: update Kalman filter, reset consecutive prediction count // Has detection result: update Kalman filter, reset consecutive prediction count
kalman_tracker.update(*current_center); kalman_tracker.update(*detection_center);
predicted_center = std::make_unique<cv::Point2f>(kalman_tracker.predict()); predicted_center = std::make_unique<cv::Point2f>(kalman_tracker.predict());
// Get velocity from the Kalman filter // Start tracking if successfully detected
target_speed = cv::Point2f(0.0f, 0.0f); // Kalman filter in OpenCV doesn't directly expose velocity if (!is_tracking && !armor_plates.empty()) {
cv::Rect2d armor_rect = cv::boundingRect(std::vector<cv::Point2f>{
armor_plates[0].corners_2d[0],
armor_plates[0].corners_2d[1],
armor_plates[0].corners_2d[2],
armor_plates[0].corners_2d[3]
});
// Expand the bounding box slightly for better tracking
cv::Rect2d expanded_rect = cv::Rect2d(
armor_rect.x - armor_rect.width * 0.1,
armor_rect.y - armor_rect.height * 0.1,
armor_rect.width * 1.2,
armor_rect.height * 1.2
);
// Ensure the rectangle is within frame bounds
expanded_rect = expanded_rect & cv::Rect2d(0, 0, frame.cols, frame.rows);
tracker = cv::TrackerKCF::create(); // Create new tracker instance
tracker->init(frame, expanded_rect);
tracking_roi = expanded_rect;
is_tracking = true;
tracking_frame_count = 0;
}
consecutive_predicts = 0;
} else if (is_tracking && tracking_center != nullptr) {
// Use tracking result
kalman_tracker.update(*tracking_center);
predicted_center = std::make_unique<cv::Point2f>(kalman_tracker.predict());
consecutive_predicts = 0; consecutive_predicts = 0;
} else { } else {
// No detection result: only use Kalman prediction, limit consecutive predictions // No detection or tracking result: only use Kalman prediction, limit consecutive predictions
consecutive_predicts++; consecutive_predicts++;
if (consecutive_predicts < max_consecutive_predicts) { if (consecutive_predicts < max_consecutive_predicts) {
cv::Point2f temp_pred = kalman_tracker.predict(); cv::Point2f temp_pred = kalman_tracker.predict();
@@ -202,11 +265,14 @@ int main(int /*argc*/, char* /*argv*/[]) {
} }
// Determine display center // Determine display center
cv::Point2f* display_center = current_center; cv::Point2f* display_center = detection_center; // Give priority to detection results
if (display_center == nullptr && is_tracking && tracking_center != nullptr) {
display_center = tracking_center;
}
if (display_center == nullptr && predicted_center != nullptr) { if (display_center == nullptr && predicted_center != nullptr) {
display_center = predicted_center.get(); display_center = predicted_center.get();
} }
bool is_predicted = (display_center != nullptr) && (current_center == nullptr); bool is_predicted = (display_center != nullptr) && (detection_center == nullptr && (!is_tracking || tracking_center == nullptr));
// Calculate ballistic prediction point // Calculate ballistic prediction point
cv::Point2f* ballistic_point = ballistic_predictor.predict_ballistic_point( cv::Point2f* ballistic_point = ballistic_predictor.predict_ballistic_point(
@@ -220,16 +286,21 @@ int main(int /*argc*/, char* /*argv*/[]) {
if (!armor_plates.empty()) { if (!armor_plates.empty()) {
visualizer.draw_armor_plate(frame, armor_plates[0]); visualizer.draw_armor_plate(frame, armor_plates[0]);
} }
// Draw tracking rectangle if tracking
if (is_tracking) {
cv::rectangle(frame, tracking_roi, cv::Scalar(0, 255, 0), 2);
}
visualizer.draw_offset_text(frame, display_center, target_color, is_predicted); visualizer.draw_offset_text(frame, display_center, target_color, is_predicted);
visualizer.draw_ballistic_point(frame, ballistic_point); visualizer.draw_ballistic_point(frame, ballistic_point);
// Output control data to TTL (passing use_ttl to control whether to send) // Output control data to TTL (passing use_ttl to control whether to send)
output_control_data(display_center, target_color, frame_counter, ttl, img_center, use_ttl); // Now sending on every frame for smoother control
output_control_data(display_center, target_color, ttl, img_center, use_ttl);
// Display windows // Display windows
cv::imshow("Armor Detection", frame);
cv::imshow(target_color + " Mask", mask); cv::imshow(target_color + " Mask", mask);
cv::imshow(target_color + " Only", color_only_frame); cv::imshow(target_color + " Only", color_only_frame);
cv::imshow("Armor Detection", frame);
// Exit on 'q' key press // Exit on 'q' key press
if (cv::waitKey(1) == 'q') { if (cv::waitKey(1) == 'q') {