Merge pull request '改到能跑' (#1) from Pro into main

Reviewed-on: #1
This commit is contained in:
lyf
2025-11-20 19:42:42 +08:00
17 changed files with 476 additions and 470 deletions

3
.gitignore vendored
View File

@@ -34,4 +34,5 @@
# BUILD # BUILD
build/ build/
.cache/ .cache/
.vscode/

View File

@@ -2,7 +2,7 @@ cmake_minimum_required(VERSION 3.10)
project(armor_detector_mdv) project(armor_detector_mdv)
# Set C++ standard # Set C++ standard
set(CMAKE_CXX_STANDARD 11) set(CMAKE_CXX_STANDARD 23)
set(CMAKE_CXX_STANDARD_REQUIRED ON) set(CMAKE_CXX_STANDARD_REQUIRED ON)
# Find OpenCV package # Find OpenCV package
@@ -15,7 +15,7 @@ include_directories(./inc)
# Add executable for MindVision version # Add executable for MindVision version
add_executable(armor_detector_mdv add_executable(armor_detector_mdv
src/main.cpp src/MindVisionMain.cpp
src/MindVisionCamera.cpp src/MindVisionCamera.cpp
src/ImagePreprocessor.cpp src/ImagePreprocessor.cpp
src/ArmorDetector.cpp src/ArmorDetector.cpp
@@ -29,7 +29,6 @@ add_executable(armor_detector_mdv
target_link_libraries(armor_detector_mdv ${OpenCV_LIBS} -lutil -lpthread) target_link_libraries(armor_detector_mdv ${OpenCV_LIBS} -lutil -lpthread)
# Link MindVision SDK library # Link MindVision SDK library
link_directories("/lib")
target_link_libraries(armor_detector_mdv MVSDK) target_link_libraries(armor_detector_mdv MVSDK)
# Additional flags for better compilation # Additional flags for better compilation

View File

@@ -6,21 +6,22 @@
class BallisticPredictor { class BallisticPredictor {
public: public:
BallisticPredictor(float bullet_speed = 16.0f); BallisticPredictor(float bullet_speed = 16.0f);
~BallisticPredictor(); // Added destructor to properly manage memory
// Predict ballistic point considering target movement and flight time // Predict ballistic point considering target movement and flight time
cv::Point2f* predict_ballistic_point(const cv::Point2f* predicted_center, cv::Point2f* predict_ballistic_point(const cv::Point2f* predicted_center,
const cv::Point2f& img_center, const cv::Point2f& img_center,
const cv::Point2f& target_speed); const cv::Point2f& target_speed);
// Get last ballistic point // Get last ballistic point
const cv::Point2f* get_last_ballistic_point() const { return last_ballistic_point; } const cv::Point2f* get_last_ballistic_point() const { return last_ballistic_point; }
private: private:
float bullet_speed; // Bullet speed (m/s) float bullet_speed; // Bullet speed (m/s)
float armor_half_width; // Armor half width (converted to meters) float armor_half_width; // Armor half width (converted to meters)
float armor_half_height; // Armor half height (converted to meters) float armor_half_height; // Armor half height (converted to meters)
cv::Point2f* last_ballistic_point; // Last ballistic prediction cv::Point2f* last_ballistic_point; // Last ballistic prediction
// Calculate distance to target // Calculate distance to target
float calculate_distance(const cv::Point2f& armor_center, const cv::Point2f& img_center, float focal_length = 800.0f); float calculate_distance(const cv::Point2f& armor_center, const cv::Point2f& img_center, float focal_length = 800.0f);
}; };

View File

@@ -8,16 +8,12 @@ class ImagePreprocessor {
public: public:
ImagePreprocessor(); ImagePreprocessor();
~ImagePreprocessor(); ~ImagePreprocessor();
// Get mask based on target color // Apply morphological operations to a mask
cv::Mat get_mask(const cv::Mat& frame, const std::string& target_color); void apply_morphology(const cv::Mat& input_mask, cv::Mat& output_mask);
// Get frame with only the target color // Apply Gaussian blur to reduce noise
cv::Mat get_color_only_frame(const cv::Mat& frame, const std::string& target_color); void apply_gaussian_blur(const cv::Mat& input, cv::Mat& output, int kernel_size = 6);
// Combined function to return both mask and color-only frame
void process_frame(const cv::Mat& frame, const std::string& target_color,
cv::Mat& mask, cv::Mat& color_only_frame);
}; };
#endif // IMAGEPREPROCESSOR_H #endif // IMAGEPREPROCESSOR_H

View File

@@ -6,12 +6,16 @@
class KalmanFilter { class KalmanFilter {
public: public:
KalmanFilter(); KalmanFilter();
void update(const cv::Point2f& measurement); void update(const cv::Point2f& measurement);
cv::Point2f predict(); cv::Point2f predict();
// Method to update the process noise based on target movement characteristics
void updateProcessNoise(const cv::Point2f& current_measurement);
cv::Point2f get_last_measurement() const { return last_measurement; } cv::Point2f get_last_measurement() const { return last_measurement; }
cv::Point2f get_last_prediction() const { return last_prediction; } cv::Point2f get_last_prediction() const { return last_prediction; }
cv::Point2f get_current_velocity() const; // New method to get current velocity
bool is_initialized() const { return initialized; } bool is_initialized() const { return initialized; }
private: private:
@@ -19,8 +23,16 @@ private:
bool initialized; bool initialized;
cv::Point2f last_measurement; cv::Point2f last_measurement;
cv::Point2f last_prediction; cv::Point2f last_prediction;
cv::Point2f prev_measurement;
bool has_prev_measurement;
// Store previous states for velocity and acceleration estimation
cv::Point2f prev_velocity;
double time_elapsed; // Time in seconds between measurements
bool has_prev_velocity;
void init_params(); void init_params();
cv::Point2f estimate_velocity(const cv::Point2f& current, const cv::Point2f& previous);
}; };
#endif // KALMANFILTER_H #endif // KALMANFILTER_H

View File

@@ -3,6 +3,7 @@
#include <opencv2/opencv.hpp> #include <opencv2/opencv.hpp>
#include <string> #include <string>
#include <memory>
// MindVision SDK 头文件 - 可能需要根据实际SDK文件调整 // MindVision SDK 头文件 - 可能需要根据实际SDK文件调整
extern "C" { extern "C" {
@@ -17,12 +18,14 @@ public:
int width; int width;
int height; int height;
int fps; int fps;
unsigned char* g_pRgbBuffer; // 处理后数据缓存区
MindVisionCamera(int cam_id = 0, const std::string& target_color = "red"); MindVisionCamera(int cam_id = 0, const std::string& target_color = "red");
~MindVisionCamera(); ~MindVisionCamera();
void set_cam_params(); bool set_cam_params();
bool read_frame(cv::Mat& frame); bool read_frame(cv::Mat& frame);
bool read_frame_with_color_filter(cv::Mat& frame, cv::Mat& mask, const std::string& target_color);
void release(); void release();
bool switch_color(const std::string& target_color); bool switch_color(const std::string& target_color);

View File

@@ -3,17 +3,11 @@
#include <string> #include <string>
// Placeholder for TTL communication class
// In a real implementation, you would use a C++ serial library like:
// - serial (https://github.com/wjwwood/serial)
// - Boost.Asio
// - or a system-specific approach
class TTLCommunicator { class TTLCommunicator {
public: public:
TTLCommunicator(int baudrate = 115200); TTLCommunicator(const std::string& port_name = "/dev/ttyUSB0", int baudrate = 115200);
~TTLCommunicator(); ~TTLCommunicator();
bool connect(); bool connect();
void close(); void close();
bool send_data(const std::string& data); bool send_data(const std::string& data);
@@ -23,8 +17,9 @@ private:
std::string port_name; std::string port_name;
int baudrate; int baudrate;
bool connected; bool connected;
// These would be implemented with actual serial communication code int serial_fd;
bool open_serial_port(); bool open_serial_port();
void close_serial_port(); void close_serial_port();
}; };

View File

@@ -28,9 +28,9 @@ const float HORIZONTAL_ANGLE_THRESHOLD_RAD = HORIZONTAL_ANGLE_THRESHOLD * CV_PI
const float NEARBY_LIGHT_BAR_THRESHOLD = 500.0f; // Light bar merging distance threshold (pixels) const float NEARBY_LIGHT_BAR_THRESHOLD = 500.0f; // Light bar merging distance threshold (pixels)
const float LIGHT_BAR_IOU_THRESHOLD = 0.05f; // Light bar merging IOU threshold const float LIGHT_BAR_IOU_THRESHOLD = 0.05f; // Light bar merging IOU threshold
const float ARMOR_DISTANCE_RATIO_MIN = 0.6f; // Armor light bar distance ratio range const float ARMOR_DISTANCE_RATIO_MIN = 0.6f; // Armor light bar distance ratio range
const float ARMOR_DISTANCE_RATIO_MAX = 3.0f; const float ARMOR_DISTANCE_RATIO_MAX = 4.0f;
const float ARMOR_LENGTH_DIFF_RATIO = 0.5f; // Armor light bar length difference ratio const float ARMOR_LENGTH_DIFF_RATIO = 0.5f; // Armor light bar length difference ratio
const float ARMOR_ANGLE_DIFF_THRESHOLD = 20.0f * CV_PI / 180.0f; // Armor light bar angle difference threshold (radians) const float ARMOR_ANGLE_DIFF_THRESHOLD = 15.0f * CV_PI / 180.0f; // Armor light bar angle difference threshold (radians)
// Kalman filter parameters // Kalman filter parameters
const float KF_PROCESS_NOISE = 0.02f; // Process noise covariance const float KF_PROCESS_NOISE = 0.02f; // Process noise covariance

View File

@@ -1,4 +1,5 @@
#include "ArmorDetector.h" #include "ArmorDetector.h"
#include <iostream>
#include <cmath> #include <cmath>
#include <algorithm> #include <algorithm>
@@ -143,8 +144,10 @@ std::vector<LightBar> ArmorDetector::filter_valid_light_bars(const std::vector<s
float bar_width = width > height ? height : width; float bar_width = width > height ? height : width;
float main_angle = rect.angle; float main_angle = rect.angle;
if (width <= height) { if (width > height) {
main_angle += 90.0; // 保持原始角度不变
} else {
main_angle += 90;
} }
// Normalize angle to [-90, 90] // Normalize angle to [-90, 90]

View File

@@ -2,20 +2,29 @@
#include "config.h" #include "config.h"
#include <cmath> #include <cmath>
BallisticPredictor::BallisticPredictor(float bullet_speed) BallisticPredictor::BallisticPredictor(float bullet_speed)
: bullet_speed(bullet_speed), last_ballistic_point(nullptr) { : bullet_speed(bullet_speed), last_ballistic_point(nullptr) {
armor_half_width = ARMOR_WIDTH / 2000.0f; // Convert to meters armor_half_width = ARMOR_WIDTH / 2000.0f; // Convert to meters
armor_half_height = ARMOR_HEIGHT / 2000.0f; // Convert to meters armor_half_height = ARMOR_HEIGHT / 2000.0f; // Convert to meters
} }
cv::Point2f* BallisticPredictor::predict_ballistic_point(const cv::Point2f* predicted_center, BallisticPredictor::~BallisticPredictor() {
const cv::Point2f& img_center, if (last_ballistic_point != nullptr) {
delete last_ballistic_point;
last_ballistic_point = nullptr;
}
}
cv::Point2f* BallisticPredictor::predict_ballistic_point(const cv::Point2f* predicted_center,
const cv::Point2f& img_center,
const cv::Point2f& target_speed) { const cv::Point2f& target_speed) {
// Clean up the last ballistic point before creating a new one
if (last_ballistic_point != nullptr) {
delete last_ballistic_point;
last_ballistic_point = nullptr;
}
if (predicted_center == nullptr) { if (predicted_center == nullptr) {
if (last_ballistic_point != nullptr) {
delete last_ballistic_point;
last_ballistic_point = nullptr;
}
return nullptr; return nullptr;
} }
@@ -36,9 +45,6 @@ cv::Point2f* BallisticPredictor::predict_ballistic_point(const cv::Point2f* pred
cv::Point2f ballistic_point(ballistic_x, ballistic_y); cv::Point2f ballistic_point(ballistic_x, ballistic_y);
// Update last ballistic point // Update last ballistic point
if (last_ballistic_point != nullptr) {
delete last_ballistic_point;
}
last_ballistic_point = new cv::Point2f(ballistic_point); last_ballistic_point = new cv::Point2f(ballistic_point);
return last_ballistic_point; return last_ballistic_point;

View File

@@ -1,4 +1,5 @@
#include "ImagePreprocessor.h" #include "ImagePreprocessor.h"
#include <algorithm>
#include <cctype> #include <cctype>
ImagePreprocessor::ImagePreprocessor() { ImagePreprocessor::ImagePreprocessor() {
@@ -9,47 +10,16 @@ ImagePreprocessor::~ImagePreprocessor() {
// Destructor implementation // Destructor implementation
} }
cv::Mat ImagePreprocessor::get_mask(const cv::Mat& frame, const std::string& target_color) { void ImagePreprocessor::apply_morphology(const cv::Mat& input_mask, cv::Mat& output_mask) {
cv::Mat hsv_frame;
cv::cvtColor(frame, hsv_frame, cv::COLOR_BGR2HSV);
cv::Mat mask;
if (target_color == "red") {
// Red color range in HSV
cv::Mat mask1, mask2;
cv::inRange(hsv_frame, cv::Scalar(0, 43, 49), cv::Scalar(25, 255, 255), mask1);
cv::inRange(hsv_frame, cv::Scalar(156, 46, 49), cv::Scalar(180, 255, 255), mask2);
mask = mask1 | mask2;
} else if (target_color == "blue") {
// Blue color range in HSV
cv::inRange(hsv_frame, cv::Scalar(83, 200, 44), cv::Scalar(130, 255, 255), mask);
} else {
// Default to empty mask if color not recognized
mask = cv::Mat::zeros(frame.size(), CV_8UC1);
}
// Apply morphological operations to reduce noise // Apply morphological operations to reduce noise
cv::Mat kernel = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(3, 3)); cv::Mat kernel = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(3, 3));
cv::morphologyEx(mask, mask, cv::MORPH_OPEN, kernel); cv::morphologyEx(input_mask, output_mask, cv::MORPH_OPEN, kernel);
cv::morphologyEx(mask, mask, cv::MORPH_CLOSE, kernel); cv::morphologyEx(output_mask, output_mask, cv::MORPH_CLOSE, kernel);
return mask;
} }
cv::Mat ImagePreprocessor::get_color_only_frame(const cv::Mat& frame, const std::string& target_color) { void ImagePreprocessor::apply_gaussian_blur(const cv::Mat& input, cv::Mat& output, int kernel_size) {
cv::Mat mask = get_mask(frame, target_color); // Apply Gaussian blur to reduce noise
// Ensure kernel size is odd for Gaussian blur
// 不再反转掩膜,直接使用原始掩膜 int adjusted_kernel_size = kernel_size % 2 == 0 ? kernel_size + 1 : kernel_size;
// 目标颜色区域为白色(255),非目标颜色区域为黑色(0) cv::GaussianBlur(input, output, cv::Size(adjusted_kernel_size, adjusted_kernel_size), 0);
cv::Mat color_only_frame;
frame.copyTo(color_only_frame, mask); // 使用掩膜复制原始帧,只保留目标颜色部分
return color_only_frame;
}
void ImagePreprocessor::process_frame(const cv::Mat& frame, const std::string& target_color,
cv::Mat& mask, cv::Mat& color_only_frame) {
mask = get_mask(frame, target_color);
color_only_frame = get_color_only_frame(frame, target_color);
} }

View File

@@ -1,38 +1,90 @@
#include "KalmanFilter.h" #include "KalmanFilter.h"
#include "config.h" #include "config.h"
#include <iostream>
#include <chrono>
KalmanFilter::KalmanFilter() { KalmanFilter::KalmanFilter() {
// 4 state variables (x, y, dx, dy), 2 measurements (x, y) // 6 state variables (x, y, vx, vy, ax, ay), 2 measurements (x, y)
kf.init(4, 2, 0); kf.init(6, 2, 0);
init_params(); init_params();
initialized = false; initialized = false;
has_prev_measurement = false;
has_prev_velocity = false;
time_elapsed = 1.0 / 100.0; // Assuming 100 FPS as default
last_measurement = cv::Point2f(0, 0); last_measurement = cv::Point2f(0, 0);
last_prediction = cv::Point2f(0, 0); last_prediction = cv::Point2f(0, 0);
prev_measurement = cv::Point2f(0, 0);
prev_velocity = cv::Point2f(0, 0);
} }
void KalmanFilter::init_params() { void KalmanFilter::init_params() {
// Transition matrix: x(k+1) = x(k) + dx(k), y(k+1) = y(k) + dy(k), dx(k+1) = dx(k), dy(k+1) = dy(k) // Transition matrix for constant acceleration model
kf.transitionMatrix = (cv::Mat_<float>(4, 4) << // x(k+1) = x(k) + vx(k)*dt + 0.5*ax(k)*dt^2
1, 0, 1, 0, // y(k+1) = y(k) + vy(k)*dt + 0.5*ay(k)*dt^2
0, 1, 0, 1, // vx(k+1) = vx(k) + ax(k)*dt
0, 0, 1, 0, // vy(k+1) = vy(k) + ay(k)*dt
0, 0, 0, 1 // ax(k+1) = ax(k)
// ay(k+1) = ay(k)
double dt = time_elapsed;
kf.transitionMatrix = (cv::Mat_<float>(6, 6) <<
1, 0, dt, 0, 0.5*dt*dt, 0,
0, 1, 0, dt, 0, 0.5*dt*dt,
0, 0, 1, 0, dt, 0,
0, 0, 0, 1, 0, dt,
0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 1
); );
// Measurement matrix: measuring x and y // Measurement matrix: measuring x and y only
kf.measurementMatrix = (cv::Mat_<float>(2, 4) << kf.measurementMatrix = (cv::Mat_<float>(2, 6) <<
1, 0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 1, 0, 0 0, 1, 0, 0, 0, 0
); );
// Process noise covariance // Process noise covariance - higher for acceleration components
kf.processNoiseCov = cv::Mat::eye(4, 4, CV_32F) * KF_PROCESS_NOISE; kf.processNoiseCov = cv::Mat::zeros(6, 6, CV_32F);
kf.processNoiseCov.at<float>(0, 0) = 1.0f; // x position noise
kf.processNoiseCov.at<float>(1, 1) = 1.0f; // y position noise
kf.processNoiseCov.at<float>(2, 2) = 4.0f; // x velocity noise
kf.processNoiseCov.at<float>(3, 3) = 4.0f; // y velocity noise
kf.processNoiseCov.at<float>(4, 4) = 8.0f; // x acceleration noise
kf.processNoiseCov.at<float>(5, 5) = 8.0f; // y acceleration noise
// Measurement noise covariance // Measurement noise covariance
kf.measurementNoiseCov = cv::Mat::eye(2, 2, CV_32F) * KF_MEASUREMENT_NOISE; kf.measurementNoiseCov = cv::Mat::eye(2, 2, CV_32F) * 0.1f; // Lower measurement noise
// Error covariance post // Error covariance post
kf.errorCovPost = cv::Mat::eye(4, 4, CV_32F); kf.errorCovPost = cv::Mat::eye(6, 6, CV_32F) * 0.1f;
}
cv::Point2f KalmanFilter::estimate_velocity(const cv::Point2f& current, const cv::Point2f& previous) {
// Calculate velocity based on position difference
cv::Point2f velocity = current - previous;
velocity.x /= (float)time_elapsed;
velocity.y /= (float)time_elapsed;
return velocity;
}
void KalmanFilter::updateProcessNoise(const cv::Point2f& current_measurement) {
if (has_prev_measurement && has_prev_velocity) {
// Estimate current velocity
cv::Point2f current_velocity = estimate_velocity(current_measurement, prev_measurement);
// Calculate acceleration based on change in velocity
cv::Point2f acceleration = current_velocity - prev_velocity;
acceleration.x /= (float)time_elapsed;
acceleration.y /= (float)time_elapsed;
// Adjust process noise based on estimated acceleration (higher acceleration = higher uncertainty)
float acc_magnitude = std::sqrt(acceleration.x * acceleration.x + acceleration.y * acceleration.y);
// Increase process noise for rapid maneuvers
float noise_factor = 1.0f + 0.5f * acc_magnitude;
kf.processNoiseCov.at<float>(4, 4) = 8.0f * noise_factor; // ax noise
kf.processNoiseCov.at<float>(5, 5) = 8.0f * noise_factor; // ay noise
}
} }
void KalmanFilter::update(const cv::Point2f& measurement) { void KalmanFilter::update(const cv::Point2f& measurement) {
@@ -40,19 +92,65 @@ void KalmanFilter::update(const cv::Point2f& measurement) {
if (!initialized) { if (!initialized) {
// Initialize state if not done yet // Initialize state if not done yet
kf.statePost = (cv::Mat_<float>(4, 1) << kf.statePost = (cv::Mat_<float>(6, 1) <<
measurement.x, measurement.x, // x position
measurement.y, measurement.y, // y position
0.0f, 0.0f, // x velocity
0.0f); 0.0f, // y velocity
0.0f, // x acceleration
0.0f // y acceleration
);
initialized = true; initialized = true;
} else { } else {
// Update process noise based on target movement characteristics
updateProcessNoise(measurement);
// Perform correction step
kf.correct(measurement_mat); kf.correct(measurement_mat);
} }
// Update previous state for next iteration
if (has_prev_measurement) {
// Calculate velocity based on position change
cv::Point2f current_velocity = estimate_velocity(measurement, prev_measurement);
if (has_prev_velocity) {
// Estimate acceleration based on velocity change
cv::Point2f acceleration = current_velocity - prev_velocity;
acceleration.x /= (float)time_elapsed;
acceleration.y /= (float)time_elapsed;
// Update state with estimated acceleration
kf.statePost.at<float>(4) = acceleration.x; // x acceleration
kf.statePost.at<float>(5) = acceleration.y; // y acceleration
}
// Update previous velocity
prev_velocity = current_velocity;
has_prev_velocity = true;
} else {
// Initialize previous velocity as zero
prev_velocity = cv::Point2f(0, 0);
has_prev_velocity = true;
}
// Update stored measurements
prev_measurement = last_measurement;
has_prev_measurement = true;
last_measurement = measurement; last_measurement = measurement;
} }
cv::Point2f KalmanFilter::get_current_velocity() const {
if (!initialized) {
return cv::Point2f(0.0f, 0.0f);
}
// Return the velocity components from the state vector
float vx = kf.statePost.at<float>(2);
float vy = kf.statePost.at<float>(3);
return cv::Point2f(vx, vy);
}
cv::Point2f KalmanFilter::predict() { cv::Point2f KalmanFilter::predict() {
if (!initialized) { if (!initialized) {
return cv::Point2f(0, 0); return cv::Point2f(0, 0);

View File

@@ -1,20 +1,27 @@
#include "MindVisionCamera.h" #include "MindVisionCamera.h"
#include <iostream> #include <iostream>
#include <memory>
MindVisionCamera::MindVisionCamera(int cam_id, const std::string& target_color)
: camera_handle(-1), is_opened(false), width(640), height(480), fps(100), g_pRgbBuffer(nullptr) {
MindVisionCamera::MindVisionCamera(int cam_id, const std::string& target_color)
: camera_handle(-1), is_opened(false), width(640), height(480), fps(30) {
if (!initialize_camera(cam_id)) { if (!initialize_camera(cam_id)) {
// 即使初始化失败,也要清理可能已分配的资源
release();
throw std::runtime_error("Cannot initialize MindVision camera!"); throw std::runtime_error("Cannot initialize MindVision camera!");
} }
is_opened = true; is_opened = true;
this->target_color = target_color; this->target_color = target_color;
if (this->target_color != "red" && this->target_color != "blue") { if (this->target_color != "red" && this->target_color != "blue") {
release(); // 清理已初始化的资源
throw std::invalid_argument("Only 'red' or 'blue' colors are supported"); throw std::invalid_argument("Only 'red' or 'blue' colors are supported");
} }
set_cam_params(); if (!set_cam_params()) { // 修改set_cam_params现在返回bool值
release(); // 清理已初始化的资源
throw std::runtime_error("Failed to set camera parameters!");
}
} }
bool MindVisionCamera::initialize_camera(int cam_id) { bool MindVisionCamera::initialize_camera(int cam_id) {
@@ -59,16 +66,12 @@ bool MindVisionCamera::initialize_camera(int cam_id) {
return false; return false;
} }
// 设置输出格式 // 让SDK进入工作模式 - 根据原始OpenCv项目应该在设置格式前调用
if (capability.sIspCapacity.bMonoSensor) {
CameraSetIspOutFormat(camera_handle, CAMERA_MEDIA_TYPE_MONO8);
} else {
CameraSetIspOutFormat(camera_handle, CAMERA_MEDIA_TYPE_BGR8);
}
// 让SDK进入工作模式
CameraPlay(camera_handle); CameraPlay(camera_handle);
// 设置输出格式 - 保持与原始工作项目一致直接设置为BGR8格式
CameraSetIspOutFormat(camera_handle, CAMERA_MEDIA_TYPE_BGR8);
// 获取并设置分辨率为 640x480 // 获取并设置分辨率为 640x480
tSdkImageResolution image_resolution; tSdkImageResolution image_resolution;
int status = CameraGetImageResolution(camera_handle, &image_resolution); int status = CameraGetImageResolution(camera_handle, &image_resolution);
@@ -95,59 +98,62 @@ bool MindVisionCamera::initialize_camera(int cam_id) {
width = image_resolution.iWidth; width = image_resolution.iWidth;
height = image_resolution.iHeight; height = image_resolution.iHeight;
// 开启自动曝光 // 分配处理后的数据缓存区 - 使用最大分辨率,与原始项目一致
CameraSetAeState(camera_handle, true); g_pRgbBuffer = (unsigned char *)malloc(capability.sResolutionRange.iHeightMax *
capability.sResolutionRange.iWidthMax * 3);
// 设置帧率 // 设置帧率(虽然稍后 set_cam_params 会再次设置,但这里先设定)
CameraSetFrameSpeed(camera_handle, 10); // 设置为中等帧率 CameraSetFrameSpeed(camera_handle, 2); // 设置为较高帧率
std::cout << "Initialized camera with resolution: " << width << "x" << height << std::endl; std::cout << "Initialized camera with resolution: " << width << "x" << height << std::endl;
if (!g_pRgbBuffer) {
std::cerr << "Failed to allocate global RGB buffer" << std::endl;
return false;
}
return true; return true;
} }
MindVisionCamera::~MindVisionCamera() { MindVisionCamera::~MindVisionCamera() {
release(); try {
if (g_pRgbBuffer) {
free(g_pRgbBuffer);
g_pRgbBuffer = nullptr;
}
release();
} catch (...) {
// 析构函数中捕获异常但不重新抛出
// 确保即使在异常情况下也能正确清理资源
}
} }
void MindVisionCamera::set_camera_parameters() { void MindVisionCamera::set_camera_parameters() {
// 先设置为手动曝光模式 // 先设置为手动曝光模式
CameraSetAeState(camera_handle, false); CameraSetAeState(camera_handle, false);
// 根据颜色设置曝光参数 // 根据颜色设置曝光参数,以优化装甲板检测效果
if (target_color == "red") { if (target_color == "red") {
CameraSetExposureTime(camera_handle, 10000); // 5ms曝光时间 CameraSetExposureTime(camera_handle, 5000); // 红色目标曝光时间5ms (5000微秒)
} else if (target_color == "blue") { } else if (target_color == "blue") {
CameraSetExposureTime(camera_handle, 4000); // 8ms曝光时间 CameraSetExposureTime(camera_handle, 8000); // 蓝色目标曝光时间8ms (8000微秒)
} }
// 设置白平衡 // 设置白平衡
CameraSetWbMode(camera_handle, false); // 关闭自动白平衡 CameraSetWbMode(camera_handle, false); // 关闭自动白平衡
CameraSetOnceWB(camera_handle); // 一次性白平衡 CameraSetOnceWB(camera_handle); // 一次性白平衡
// 设置其他参数 // 设置其他参数
CameraSetTriggerMode(camera_handle, 0); // 连续采集模式 CameraSetTriggerMode(camera_handle, 0); // 连续采集模式
// 设置帧率为100FPS
CameraSetFrameSpeed(camera_handle, 2); // 2代表较高帧率具体值可能需要调整
} }
void MindVisionCamera::set_cam_params() { bool MindVisionCamera::set_cam_params() {
// 只设置相机参数(如曝光、白平衡等),不重新设置分辨率
set_camera_parameters(); set_camera_parameters();
return true;
// Ensure resolution remains 640x480 after setting other parameters
tSdkImageResolution image_resolution;
image_resolution.iIndex = 0xFF; // Use custom resolution (0xFF typically indicates custom resolution)
image_resolution.iWidth = 640;
image_resolution.iHeight = 480;
image_resolution.iWidthFOV = image_resolution.iWidth;
image_resolution.iHeightFOV = image_resolution.iHeight;
image_resolution.iHOffsetFOV = 0;
image_resolution.iVOffsetFOV = 0;
int status = CameraSetImageResolution(camera_handle, &image_resolution);
if (status != CAMERA_STATUS_SUCCESS) {
std::cout << "Failed to maintain resolution at 640x480 in set_cam_params" << std::endl;
} else {
std::cout << "Confirmed camera resolution: " << image_resolution.iWidth << "x" << image_resolution.iHeight << std::endl;
}
} }
bool MindVisionCamera::read_frame(cv::Mat& frame) { bool MindVisionCamera::read_frame(cv::Mat& frame) {
@@ -157,39 +163,88 @@ bool MindVisionCamera::read_frame(cv::Mat& frame) {
tSdkFrameHead sFrameInfo; tSdkFrameHead sFrameInfo;
BYTE* pbyBuffer; BYTE* pbyBuffer;
// 获取一帧数据 // 获取一帧数据
if (CameraGetImageBuffer(camera_handle, &sFrameInfo, &pbyBuffer, 1000) == CAMERA_STATUS_SUCCESS) { if (CameraGetImageBuffer(camera_handle, &sFrameInfo, &pbyBuffer, 1000) == CAMERA_STATUS_SUCCESS) {
// 处理图像数据 // 使用全局缓冲区处理图像数据 - 与原始项目一致
unsigned char* g_pRgbBuffer = new unsigned char[sFrameInfo.iWidth * sFrameInfo.iHeight * 3]; int status = CameraImageProcess(camera_handle, pbyBuffer, g_pRgbBuffer, &sFrameInfo);
CameraImageProcess(camera_handle, pbyBuffer, g_pRgbBuffer, &sFrameInfo); if (status != CAMERA_STATUS_SUCCESS) {
std::cerr << "Failed to process image: error code " << status << std::endl;
// 创建OpenCV Mat对象 CameraReleaseImageBuffer(camera_handle, pbyBuffer);
frame = cv::Mat( return false;
cv::Size(sFrameInfo.iWidth, sFrameInfo.iHeight), }
sFrameInfo.uiMediaType == CAMERA_MEDIA_TYPE_MONO8 ? CV_8UC1 : CV_8UC3,
g_pRgbBuffer // 创建OpenCV Mat对象 - 使用全局缓冲区固定为CV_8UC3格式
).clone(); // clone()确保数据被复制,而不是共享指针 cv::Mat temp_mat(sFrameInfo.iHeight, sFrameInfo.iWidth, CV_8UC3, g_pRgbBuffer);
frame = temp_mat.clone(); // clone()确保数据被复制,而不是共享指针
// 释放原始缓冲区 // 释放原始缓冲区
CameraReleaseImageBuffer(camera_handle, pbyBuffer); CameraReleaseImageBuffer(camera_handle, pbyBuffer);
// 释放临时缓冲区
delete[] g_pRgbBuffer;
return true; return true;
} }
return false;
}
bool MindVisionCamera::read_frame_with_color_filter(cv::Mat& frame, cv::Mat& raw_mask, const std::string& target_color) {
if (!is_opened) {
return false;
}
tSdkFrameHead sFrameInfo;
BYTE* pbyBuffer;
// 获取一帧数据
if (CameraGetImageBuffer(camera_handle, &sFrameInfo, &pbyBuffer, 1000) == CAMERA_STATUS_SUCCESS) {
// 使用全局缓冲区处理图像数据 - 与原始项目一致
int status = CameraImageProcess(camera_handle, pbyBuffer, g_pRgbBuffer, &sFrameInfo);
if (status != CAMERA_STATUS_SUCCESS) {
std::cerr << "Failed to process image: error code " << status << std::endl;
CameraReleaseImageBuffer(camera_handle, pbyBuffer);
return false;
}
// 创建原始RGB图像 - 使用全局缓冲区固定为CV_8UC3格式
cv::Mat original_img(sFrameInfo.iHeight, sFrameInfo.iWidth, CV_8UC3, g_pRgbBuffer);
// 复制图像数据,确保原始数据不会被后续操作影响
frame = original_img.clone();
// 创建HSV图像用于颜色过滤
cv::Mat hsv_img;
cv::cvtColor(frame, hsv_img, cv::COLOR_BGR2HSV);
// 根据颜色创建原始掩码
if (target_color == "red") {
// Red color range in HSV
cv::Mat mask1, mask2;
cv::inRange(hsv_img, cv::Scalar(0, 43, 49), cv::Scalar(25, 255, 255), mask1);
cv::inRange(hsv_img, cv::Scalar(156, 46, 49), cv::Scalar(180, 255, 255), mask2);
raw_mask = mask1 | mask2;
} else if (target_color == "blue") {
// Blue color range in HSV
cv::inRange(hsv_img, cv::Scalar(83, 200, 44), cv::Scalar(130, 255, 255), raw_mask);
} else {
raw_mask = cv::Mat::zeros(hsv_img.size(), CV_8UC1);
}
// 释放相机缓冲区
CameraReleaseImageBuffer(camera_handle, pbyBuffer);
return true;
}
return false; return false;
} }
void MindVisionCamera::release() { void MindVisionCamera::release() {
if (is_opened && camera_handle >= 0) { if (camera_handle >= 0) {
// 停止采集 // 停止采集不管is_opened状态如何都尝试释放
CameraUnInit(camera_handle); CameraUnInit(camera_handle);
camera_handle = -1; camera_handle = -1;
is_opened = false;
} }
is_opened = false;
} }
bool MindVisionCamera::switch_color(const std::string& new_color) { bool MindVisionCamera::switch_color(const std::string& new_color) {
@@ -201,8 +256,7 @@ bool MindVisionCamera::switch_color(const std::string& new_color) {
if ((lower_color == "red" || lower_color == "blue") && lower_color != target_color) { if ((lower_color == "red" || lower_color == "blue") && lower_color != target_color) {
target_color = lower_color; target_color = lower_color;
set_cam_params(); return set_cam_params(); // 返回set_cam_params的结果
return true;
} }
return false; return false;
} }

View File

@@ -3,6 +3,7 @@
#include <vector> #include <vector>
#include <chrono> #include <chrono>
#include <thread> #include <thread>
#include <memory>
#include <opencv2/opencv.hpp> #include <opencv2/opencv.hpp>
@@ -22,26 +23,21 @@ void output_control_data(const cv::Point2f* ballistic_point,
TTLCommunicator* ttl_communicator, TTLCommunicator* ttl_communicator,
const cv::Point2f& img_center, const cv::Point2f& img_center,
bool use_ttl) { bool use_ttl) {
// Only send data when TTL is enabled and meets frame interval // Only send data when TTL is enabled, meets frame interval, and has valid target
if (use_ttl && frame_counter % 5 == 0) { if (use_ttl && frame_counter % 5 == 0 && ballistic_point != nullptr) {
std::string send_str; std::string send_str;
if (ballistic_point != nullptr) { // Calculate offset (based on actual image center)
// Calculate offset (based on actual image center) int ballistic_offset_x = static_cast<int>(ballistic_point->x - img_center.x);
int ballistic_offset_x = static_cast<int>(ballistic_point->x - img_center.x); int ballistic_offset_y = static_cast<int>(img_center.y - ballistic_point->y);
int ballistic_offset_y = static_cast<int>(ballistic_point->y - img_center.y);
// Color simplification mapping // Color simplification mapping
std::string simplified_color = target_color; std::string simplified_color = target_color;
if (target_color == "red") simplified_color = "r"; if (target_color == "red") simplified_color = "r";
else if (target_color == "blue") simplified_color = "b"; else if (target_color == "blue") simplified_color = "b";
// Construct send string // Construct send string
send_str = "s " + simplified_color + " " + std::to_string(ballistic_offset_x) + " " + std::to_string(ballistic_offset_y); send_str = "s " + simplified_color + " " + std::to_string(ballistic_offset_x) + " " + std::to_string(ballistic_offset_y);
} else {
// Format when no detection result
send_str = "s u 0 0";
}
// Send data // Send data
if (ttl_communicator != nullptr) { if (ttl_communicator != nullptr) {
@@ -58,10 +54,10 @@ void set_camera_resolution(MindVisionCamera& , int width, int height) {
} }
int main(int /*argc*/, char* /*argv*/[]) { int main(int /*argc*/, char* /*argv*/[]) {
std::string target_color = "blue"; std::string target_color = "red";
int cam_id = 0; int cam_id = 0;
cv::Size default_resolution(640, 480); cv::Size default_resolution(640, 480);
bool use_ttl = false; // Set to false to disable TTL communication bool use_ttl = true; // Set to false to disable TTL communication
// Define optional resolution list (adjust based on camera support) // Define optional resolution list (adjust based on camera support)
std::vector<cv::Size> resolutions = { std::vector<cv::Size> resolutions = {
@@ -83,9 +79,14 @@ int main(int /*argc*/, char* /*argv*/[]) {
// Initialize TTL communication (only when enabled) // Initialize TTL communication (only when enabled)
TTLCommunicator* ttl = nullptr; TTLCommunicator* ttl = nullptr;
if (use_ttl) { if (use_ttl) {
ttl = new TTLCommunicator(TTL_BAUDRATE); // On Linux, the port would typically be /dev/ttyUSB0, /dev/ttyACM0, etc.
if (!ttl->connect()) { ttl = new TTLCommunicator("/dev/ttyUSB0", TTL_BAUDRATE);
std::cout << "Warning: Cannot establish TTL connection, will continue running but not send data" << std::endl; if (ttl != nullptr) {
if (!ttl->connect()) {
std::cout << "Warning: Cannot establish TTL connection, will continue running but not send data" << std::endl;
}
} else {
std::cout << "Error: Failed to create TTL communicator instance" << std::endl;
} }
} else { } else {
std::cout << "TTL communication disabled" << std::endl; std::cout << "TTL communication disabled" << std::endl;
@@ -119,17 +120,23 @@ int main(int /*argc*/, char* /*argv*/[]) {
cv::Mat frame; cv::Mat frame;
try { try {
while (true) { while (true) {
if (!camera.read_frame(frame)) { // 使用新的颜色过滤方法同时获取图像和原始掩码
std::cout << "Cannot read from MindVision camera, exiting!" << std::endl; cv::Mat raw_mask;
if (!camera.read_frame_with_color_filter(frame, raw_mask, target_color)) {
std::cout << "Cannot read from MindVision camera, exiting!HERERER" << std::endl;
break; break;
} }
// Get actual image size and calculate center // Get actual image size and calculate center
cv::Point2f img_center(frame.cols / 2.0f, frame.rows / 2.0f); cv::Point2f img_center(frame.cols / 2.0f, frame.rows / 2.0f);
// Preprocessing // 使用OpenCV进行形态学处理
cv::Mat mask, color_only_frame; cv::Mat mask;
preprocessor.process_frame(frame, target_color, mask, color_only_frame); preprocessor.apply_morphology(raw_mask, mask);
// 生成彩色图像(仅显示目标颜色)
cv::Mat color_only_frame;
frame.copyTo(color_only_frame, raw_mask);
// Armor plate detection // Armor plate detection
std::vector<LightBar> valid_light_bars; std::vector<LightBar> valid_light_bars;
@@ -142,12 +149,13 @@ int main(int /*argc*/, char* /*argv*/[]) {
current_center = &(armor_plates[0].center); current_center = &(armor_plates[0].center);
} }
cv::Point2f* predicted_center = nullptr; // Use smart pointer for safer memory management
std::unique_ptr<cv::Point2f> predicted_center = nullptr;
if (current_center != nullptr) { if (current_center != nullptr) {
// Has detection result: update Kalman filter, reset consecutive prediction count // Has detection result: update Kalman filter, reset consecutive prediction count
kalman_tracker.update(*current_center); kalman_tracker.update(*current_center);
predicted_center = new cv::Point2f(kalman_tracker.predict()); predicted_center = std::make_unique<cv::Point2f>(kalman_tracker.predict());
// Get velocity from the Kalman filter // Get velocity from the Kalman filter
target_speed = cv::Point2f(0.0f, 0.0f); // Kalman filter in OpenCV doesn't directly expose velocity target_speed = cv::Point2f(0.0f, 0.0f); // Kalman filter in OpenCV doesn't directly expose velocity
@@ -158,7 +166,7 @@ int main(int /*argc*/, char* /*argv*/[]) {
if (consecutive_predicts < max_consecutive_predicts) { if (consecutive_predicts < max_consecutive_predicts) {
cv::Point2f temp_pred = kalman_tracker.predict(); cv::Point2f temp_pred = kalman_tracker.predict();
if (temp_pred.x != 0 || temp_pred.y != 0) { // Check if prediction is valid if (temp_pred.x != 0 || temp_pred.y != 0) { // Check if prediction is valid
predicted_center = new cv::Point2f(temp_pred); predicted_center = std::make_unique<cv::Point2f>(temp_pred);
} }
} else { } else {
predicted_center = nullptr; predicted_center = nullptr;
@@ -169,13 +177,13 @@ int main(int /*argc*/, char* /*argv*/[]) {
// Determine display center // Determine display center
cv::Point2f* display_center = current_center; cv::Point2f* display_center = current_center;
if (display_center == nullptr && predicted_center != nullptr) { if (display_center == nullptr && predicted_center != nullptr) {
display_center = predicted_center; display_center = predicted_center.get();
} }
bool is_predicted = (display_center != nullptr) && (current_center == nullptr); bool is_predicted = (display_center != nullptr) && (current_center == nullptr);
// Calculate ballistic prediction point // Calculate ballistic prediction point
cv::Point2f* ballistic_point = ballistic_predictor.predict_ballistic_point( cv::Point2f* ballistic_point = ballistic_predictor.predict_ballistic_point(
predicted_center, img_center, target_speed predicted_center.get(), img_center, target_speed
); );
auto current_time = std::chrono::high_resolution_clock::now(); auto current_time = std::chrono::high_resolution_clock::now();
@@ -210,24 +218,36 @@ int main(int /*argc*/, char* /*argv*/[]) {
std::this_thread::sleep_for(std::chrono::milliseconds(10 - elapsed)); std::this_thread::sleep_for(std::chrono::milliseconds(10 - elapsed));
} }
// Clean up dynamically allocated memory // Smart pointers automatically handle memory cleanup
if (predicted_center != nullptr) {
delete predicted_center;
predicted_center = nullptr;
}
} }
} }
catch (const std::exception& e) { catch (const std::exception& e) {
std::cerr << "Error: " << e.what() << std::endl; std::cerr << "Error in main loop: " << e.what() << std::endl;
}
catch (...) {
std::cerr << "Unknown error occurred in main loop" << std::endl;
} }
// Cleanup // Cleanup
camera.release(); try {
cv::destroyAllWindows(); camera.release();
} catch (...) {
std::cerr << "Error during camera release" << std::endl;
}
try {
cv::destroyAllWindows();
} catch (...) {
std::cerr << "Error during window destruction" << std::endl;
}
// Only close TTL connection when enabled and initialized // Only close TTL connection when enabled and initialized
if (use_ttl && ttl != nullptr) { if (use_ttl && ttl != nullptr) {
ttl->close(); try {
ttl->close();
} catch (...) {
std::cerr << "Error during TTL close" << std::endl;
}
delete ttl; delete ttl;
ttl = nullptr; ttl = nullptr;
} }

View File

@@ -1,9 +1,17 @@
#include "TTLCommunicator.h" #include "TTLCommunicator.h"
#include <iostream> #include <iostream>
TTLCommunicator::TTLCommunicator(int baudrate) : baudrate(baudrate), connected(false) { #include <sys/types.h>
// Default to a common serial port name, though this will need to be configured for the specific system #include <sys/stat.h>
port_name = "/dev/ttyUSB0"; // Common for Linux/Ubuntu #include <fcntl.h>
#include <termios.h>
#include <unistd.h>
#include <errno.h>
#include <string.h>
TTLCommunicator::TTLCommunicator(const std::string& port, int baudrate)
: port_name(port), baudrate(baudrate), connected(false) {
serial_fd = -1;
} }
TTLCommunicator::~TTLCommunicator() { TTLCommunicator::~TTLCommunicator() {
@@ -11,19 +19,16 @@ TTLCommunicator::~TTLCommunicator() {
} }
bool TTLCommunicator::connect() { bool TTLCommunicator::connect() {
// Placeholder implementation - in a real implementation, you would open the serial port
std::cout << "Attempting to connect to TTL device on port: " << port_name << " at baud rate: " << baudrate << std::endl; std::cout << "Attempting to connect to TTL device on port: " << port_name << " at baud rate: " << baudrate << std::endl;
// For now, simulate a connection attempt
// In real implementation, use system calls to open the serial port
connected = open_serial_port(); connected = open_serial_port();
if (connected) { if (connected) {
std::cout << "TTL connection established." << std::endl; std::cout << "TTL connection established." << std::endl;
} else { } else {
std::cout << "Warning: Failed to establish TTL connection." << std::endl; std::cout << "Warning: Failed to establish TTL connection." << std::endl;
} }
return connected; return connected;
} }
@@ -37,23 +42,103 @@ void TTLCommunicator::close() {
bool TTLCommunicator::send_data(const std::string& data) { bool TTLCommunicator::send_data(const std::string& data) {
if (!connected) { if (!connected) {
std::cerr << "Error: Cannot send data, TTL not connected." << std::endl;
return false; return false;
} }
// Placeholder for sending data over serial if (serial_fd != -1) {
// In real implementation, write to the serial port ssize_t bytes_written = write(serial_fd, data.c_str(), data.length());
std::cout << "Sending TTL data: " << data << std::endl; if (bytes_written == -1) {
std::cerr << "Error writing to serial port: " << strerror(errno) << std::endl;
// Simulate successful transmission return false;
return true; } else {
std::cout << "Sent " << bytes_written << " bytes: " << data << std::endl;
fsync(serial_fd); // Ensure data is sent
return true;
}
}
return false;
} }
bool TTLCommunicator::open_serial_port() { bool TTLCommunicator::open_serial_port() {
// In a real implementation, this would use system calls like open(), tcsetattr(), etc. // Open the serial port
// For now, return true to simulate a successful connection serial_fd = open(port_name.c_str(), O_RDWR | O_NOCTTY);
if (serial_fd == -1) {
std::cerr << "Error opening serial port " << port_name << std::endl;
return false;
}
struct termios tty;
// Get current attributes
if (tcgetattr(serial_fd, &tty) != 0) {
std::cerr << "Error getting serial attributes" << std::endl;
::close(serial_fd);
return false;
}
// Set baud rate
speed_t baud_rate;
switch(baudrate) {
case 9600: baud_rate = B9600; break;
case 19200: baud_rate = B19200; break;
case 38400: baud_rate = B38400; break;
case 57600: baud_rate = B57600; break;
case 115200: baud_rate = B115200; break;
case 230400: baud_rate = B230400; break;
case 460800: baud_rate = B460800; break;
case 921600: baud_rate = B921600; break;
default:
std::cerr << "Unsupported baud rate: " << baudrate << std::endl;
::close(serial_fd);
return false;
}
cfsetispeed(&tty, baud_rate);
cfsetospeed(&tty, baud_rate);
// 8N1: 8 data bits, no parity, 1 stop bit
tty.c_cflag &= ~PARENB; // No parity
tty.c_cflag &= ~CSTOPB; // 1 stop bit
tty.c_cflag &= ~CSIZE; // Clear data bits
tty.c_cflag |= CS8; // 8 data bits
tty.c_cflag &= ~CRTSCTS; // No flow control
tty.c_cflag |= CREAD | CLOCAL; // Enable reading, ignore control lines
// Non-canonical mode
tty.c_lflag &= ~ICANON; // Non-canonical mode
tty.c_lflag &= ~ECHO; // No echo
tty.c_lflag &= ~ECHOE; // No erasure
tty.c_lflag &= ~ISIG; // No signal handling
// No software flow control
tty.c_iflag &= ~(IXON | IXOFF | IXANY);
// No output processing
tty.c_oflag &= ~OPOST;
// Timeouts
tty.c_cc[VMIN] = 0; // Non-blocking read
tty.c_cc[VTIME] = 5; // 0.5 second timeout
// Apply settings
if (tcsetattr(serial_fd, TCSANOW, &tty) != 0) {
std::cerr << "Error setting serial attributes" << std::endl;
::close(serial_fd);
return false;
}
// Flush port
tcflush(serial_fd, TCIOFLUSH);
return true; return true;
} }
void TTLCommunicator::close_serial_port() { void TTLCommunicator::close_serial_port() {
// In a real implementation, this would close the file descriptor if (serial_fd != -1) {
::close(serial_fd);
serial_fd = -1;
}
connected = false;
} }

View File

@@ -43,7 +43,7 @@ cv::Mat& Visualizer::draw_offset_text(cv::Mat& frame, const cv::Point2f* display
cv::Point2f img_center(frame.cols / 2.0f, frame.rows / 2.0f); cv::Point2f img_center(frame.cols / 2.0f, frame.rows / 2.0f);
int offset_x = static_cast<int>(display_center->x - img_center.x); int offset_x = static_cast<int>(display_center->x - img_center.x);
int offset_y = static_cast<int>(display_center->y - img_center.y); int offset_y = static_cast<int>(img_center.y - display_center->y);
std::stringstream ss; std::stringstream ss;
ss << "Offset: (" << offset_x << ", " << offset_y << ")"; ss << "Offset: (" << offset_x << ", " << offset_y << ")";

View File

@@ -1,237 +0,0 @@
#include <chrono>
#include <iostream>
#include <memory>
#include <string>
#include <thread>
#include <vector>

#include <opencv2/opencv.hpp>

#include "config.h"
#include "MindVisionCamera.h" // 使用MindVision相机
#include "ImagePreprocessor.h"
#include "ArmorDetector.h"
#include "KalmanFilter.h"
#include "Visualizer.h"
#include "BallisticPredictor.h"
#include "TTLCommunicator.h"
// Send aiming offsets to the TTL device, throttled to every 5th frame.
// Nothing is transmitted unless TTL output is enabled, an armor plate was
// actually detected this frame, and a valid ballistic point is available.
//
// ballistic_point  - aim point in image coordinates (nullptr => send nothing)
// target_color     - "red"/"blue"; abbreviated on the wire to "r"/"b"
// frame_counter    - running frame index used for the 1-in-5 throttle
// ttl_communicator - serial transport (may be nullptr; then nothing is sent)
// img_center       - image center used as the offset origin
// use_ttl          - global enable flag for TTL output
// armor_detected   - true only when a real (non-predicted) detection exists
void output_control_data(const cv::Point2f* ballistic_point,
                         const std::string& target_color,
                         int frame_counter,
                         TTLCommunicator* ttl_communicator,
                         const cv::Point2f& img_center,
                         bool use_ttl,
                         bool armor_detected) {
    // Throttle: only every 5th frame, and only with a confirmed detection.
    const bool send_window = use_ttl && (frame_counter % 5 == 0) && armor_detected;
    if (!send_window || ballistic_point == nullptr) {
        // When ballistic_point is nullptr, no TTL data is sent (by design).
        return;
    }

    // Pixel offsets of the aim point relative to the image center.
    const int dx = static_cast<int>(ballistic_point->x - img_center.x);
    const int dy = static_cast<int>(ballistic_point->y - img_center.y);

    // Abbreviate the color for the wire protocol.
    std::string color_code = target_color;
    if (target_color == "red") {
        color_code = "r";
    } else if (target_color == "blue") {
        color_code = "b";
    }

    // Wire format: "s <color> <dx> <dy>"
    const std::string payload =
        "s " + color_code + " " + std::to_string(dx) + " " + std::to_string(dy);

    if (ttl_communicator != nullptr) {
        ttl_communicator->send_data(payload);
    }
}
void set_camera_resolution(MindVisionCamera, int width, int height) {
// The resolution is set during camera initialization in MindVision
// We already set it in MindVisionCamera constructor, so this is for logging only
std::cout << "Camera resolution already set to: " << width << "x" << height << " during initialization" << std::endl;
}
int main(int /*argc*/, char* /*argv*/[]) {
std::string target_color = "red";
int cam_id = 0;
cv::Size default_resolution(640, 480); // This will be set by MindVisionCamera constructor
bool use_ttl = true; // Set to false to disable TTL communication
// Define optional resolution list (adjust based on camera support)
std::vector<cv::Size> resolutions = {
cv::Size(320, 240), // Low resolution, high frame rate
cv::Size(640, 480), // Standard resolution
cv::Size(1280, 720), // HD resolution
cv::Size(1920, 1080) // Full HD resolution
};
// Find the index of default resolution
int res_index = 1; // Default to index 1 (640x480)
for (size_t i = 0; i < resolutions.size(); i++) {
if (resolutions[i] == default_resolution) {
res_index = static_cast<int>(i);
break;
}
}
// Initialize TTL communication (only when enabled)
TTLCommunicator* ttl = nullptr;
if (use_ttl) {
ttl = new TTLCommunicator(TTL_BAUDRATE);
if (!ttl->connect()) {
std::cout << "Warning: Cannot establish TTL connection, will continue running but not send data" << std::endl;
}
} else {
std::cout << "TTL communication disabled" << std::endl;
}
// Initialize visual processing modules with MindVision camera
MindVisionCamera camera(cam_id, target_color);
if (!camera.is_opened) {
std::cerr << "Cannot open MindVision camera!" << std::endl;
return -1;
}
// Initialize ballistic predictor (adjustable bullet speed, e.g., 16m/s)
BallisticPredictor ballistic_predictor(16.0f);
// Record target speed (obtained from Kalman filter)
cv::Point2f target_speed(0.0f, 0.0f);
// Set initial resolution
set_camera_resolution(camera, resolutions[res_index].width, resolutions[res_index].height);
std::cout << "Initial camera resolution: " << resolutions[res_index].width << "x" << resolutions[res_index].height << std::endl;
ImagePreprocessor preprocessor;
ArmorDetector detector;
KalmanFilter kalman_tracker;
Visualizer visualizer;
int frame_counter = 0; // Counter to control output frequency
int max_consecutive_predicts = 20; // Maximum consecutive prediction times
int consecutive_predicts = 0; // Current consecutive prediction count
cv::Mat frame;
try {
while (true) {
//if (!camera.read_frame(frame)) {
// std::cout << "Cannot read from MindVision camera, exiting!" << std::endl;
// break;
//}
// Get actual image size and calculate center
cv::Point2f img_center(frame.cols / 2.0f, frame.rows / 2.0f);
// Preprocessing
cv::Mat mask, color_only_frame;
preprocessor.process_frame(frame, target_color, mask, color_only_frame);
// Armor plate detection
std::vector<LightBar> valid_light_bars;
std::vector<ArmorPlate> armor_plates;
detector.detect(mask, target_color, valid_light_bars, armor_plates);
// Kalman filter tracking (modified part: get target speed)
cv::Point2f* current_center = nullptr;
if (!armor_plates.empty()) {
current_center = &(armor_plates[0].center);
}
cv::Point2f* predicted_center = nullptr;
if (current_center != nullptr) {
// Has detection result: update Kalman filter, reset consecutive prediction count
kalman_tracker.update(*current_center);
predicted_center = new cv::Point2f(kalman_tracker.predict());
// Get velocity from the Kalman filter
target_speed = cv::Point2f(0.0f, 0.0f); // Kalman filter in OpenCV doesn't directly expose velocity
consecutive_predicts = 0;
} else {
// No detection result: only use Kalman prediction, limit consecutive predictions
consecutive_predicts++;
if (consecutive_predicts < max_consecutive_predicts) {
cv::Point2f temp_pred = kalman_tracker.predict();
if (temp_pred.x != 0 || temp_pred.y != 0) { // Check if prediction is valid
predicted_center = new cv::Point2f(temp_pred);
}
} else {
predicted_center = nullptr;
target_speed = cv::Point2f(0.0f, 0.0f);
}
}
// Determine display center
cv::Point2f* display_center = current_center;
if (display_center == nullptr && predicted_center != nullptr) {
display_center = predicted_center;
}
bool is_predicted = (display_center != nullptr) && (current_center == nullptr);
// Calculate ballistic prediction point
cv::Point2f* ballistic_point = ballistic_predictor.predict_ballistic_point(
predicted_center, img_center, target_speed
);
auto current_time = std::chrono::high_resolution_clock::now();
// Visualization
visualizer.draw_light_bars(frame, valid_light_bars, target_color);
if (!armor_plates.empty()) {
visualizer.draw_armor_plate(frame, armor_plates[0]);
}
visualizer.draw_offset_text(frame, display_center, target_color, is_predicted);
visualizer.draw_ballistic_point(frame, ballistic_point);
// Output control data to TTL (passing use_ttl to control whether to send)
// Only send TTL data if actual armor plates were detected (not just predicted)
bool armor_detected = !armor_plates.empty();
// Only send TTL data when we have an actual detection, not just prediction
bool should_send_ttl = armor_detected && (display_center != nullptr);
output_control_data(display_center, target_color, frame_counter, ttl, img_center, use_ttl, should_send_ttl);
// Display windows
cv::imshow("Armor Detection", frame);
cv::imshow(target_color + " Mask", mask);
cv::imshow(target_color + " Only", color_only_frame);
// Exit on 'q' key press
if (cv::waitKey(1) == 'q') {
break;
}
frame_counter++;
// Control max frame rate (100 FPS)
auto end_time = std::chrono::high_resolution_clock::now();
auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(end_time - current_time).count();
if (elapsed < 10) { // 100 FPS = 10ms per frame
std::this_thread::sleep_for(std::chrono::milliseconds(30 - elapsed));
}
// Clean up dynamically allocated memory
if (predicted_center != nullptr) {
delete predicted_center;
predicted_center = nullptr;
}
}
}
catch (const std::exception& e) {
std::cerr << "Error: " << e.what() << std::endl;
}
// Cleanup
camera.release();
cv::destroyAllWindows();
// Only close TTL connection when enabled and initialized
if (use_ttl && ttl != nullptr) {
ttl->close();
delete ttl;
ttl = nullptr;
}
return 0;
}