This commit is contained in:
xinyang
2019-04-14 17:12:43 +08:00
commit 42c5434dc8
47 changed files with 6423 additions and 0 deletions

2
.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
cmake-build-debug/*
.idea/*

27
CMakeLists.txt Normal file
View File

@@ -0,0 +1,27 @@
cmake_minimum_required(VERSION 3.5)
project(auto-aim)

set(CMAKE_CXX_STANDARD 11)
set(CMAKE_BUILD_TYPE DEBUG)

# Speed up rebuilds with ccache when it is installed.
find_program(CCACHE_FOUND ccache)
if(CCACHE_FOUND)
    set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)
    set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ccache)
    message("< Use ccache for compiler >")
endif()

find_package(OpenCV 3 REQUIRED)
find_package(Threads)

include_directories(${PROJECT_SOURCE_DIR}/energy/include)
include_directories(${PROJECT_SOURCE_DIR}/armor/include)
include_directories(${PROJECT_SOURCE_DIR}/include)
include_directories(${PROJECT_SOURCE_DIR}/src)

# Fix: the energy glob was "energy/src/*cpp" (missing dot); use the intended
# "*.cpp" pattern so only C++ sources are collected, consistently with the rest.
file(GLOB_RECURSE sourcefiles "src/*.cpp" "energy/src/*.cpp" "armor/src/*.cpp")

add_executable(run main.cpp ${sourcefiles})
target_link_libraries(run ${CMAKE_THREAD_LIBS_INIT})
target_link_libraries(run ${OpenCV_LIBS})
# Camera vendor SDK shipped as a prebuilt shared object in the project root.
target_link_libraries(run ${PROJECT_SOURCE_DIR}/libMVSDK.so)

View File

@@ -0,0 +1,57 @@
//
// Created by xinyang on 19-3-27.
//
#ifndef _ARMOR_FINDER_H_
#define _ARMOR_FINDER_H_
#include <opencv2/core.hpp>
#include <opencv2/tracking.hpp>
#include <uart/uart.h>
// Which side the enemy robots are on; selects the colour channel to detect.
enum EnemyColor {
    ENEMY_BLUE,
    ENEMY_RED
};
// Armor-plate detector/tracker state machine.
// Cycles STANDBY -> SEARCHING -> TRACKING, sending aim angles over uart.
class ArmorFinder{
public:
// color: which enemy colour to look for; u: uart link used to report targets.
ArmorFinder(EnemyColor color, Uart &u);
~ArmorFinder() = default;
private:
// Tracker implementation used while in TRACKING_STATE.
typedef cv::TrackerKCF TrackerToUse;
typedef enum{
SEARCHING_STATE, TRACKING_STATE, STANDBY_STATE
} State;
EnemyColor enemy_color;
State state;
// Last detected/tracked armour bounding box (image coordinates).
cv::Rect2d armor_box;
cv::Ptr<cv::Tracker> tracker;
// Bright-pixel count inside armor_box when tracking started; used as a
// sanity check that the tracker is still on the same target.
int contour_area;
Uart &uart;
// One iteration of each state; return value decides state transitions.
bool stateSearchingTarget(cv::Mat &src);
bool stateTrackingTarget(cv::Mat &src);
bool stateStandBy();
public:
// Process one frame (CV_8UC3 or grey) through the current state.
void run(cv::Mat &src);
// Convert armor_box to yaw/pitch and send it over uart.
bool sendBoxPosition();
};
struct LightBlob {
cv::RotatedRect rect;
double length;
explicit LightBlob(cv::RotatedRect &r) : rect(r) {
length = std::max(rect.size.height, rect.size.width);
};
bool operator<(LightBlob &l2) { return this->rect.center.x < l2.rect.center.x; }
bool operator<=(LightBlob &l2) { return this->rect.center.x <= l2.rect.center.x; }
bool operator>(LightBlob &l2) { return this->rect.center.x > l2.rect.center.x; }
bool operator>=(LightBlob &l2) { return this->rect.center.x >= l2.rect.center.x; }
};
#endif /* _ARMOR_FINDER_H_ */

View File

@@ -0,0 +1,17 @@
//
// Created by xinyang on 19-3-27.
//
#ifndef _SHOW_IMAGES_H_
#define _SHOW_IMAGES_H_
#include <opencv2/core.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/imgproc.hpp>
#include <armor_finder/armor_finder.h>
// Debug visualisation helpers: draw detections on a copy of src and imshow it.
// Draw every candidate armour box (green) in a named window.
void showArmorBoxVector(std::string windows_name, const cv::Mat &src, const std::vector<cv::Rect2d> &armor_box);
// Draw a single armour box (green) in a named window.
void showArmorBox(std::string windows_name, const cv::Mat &src, cv::Rect2d armor_box);
// Draw the bounding boxes of detected light bars (blue) in a named window.
void showContours(std::string windows_name, const cv::Mat &src, const std::vector<LightBlob> &light_blobs);
#endif /* _SHOW_IMAGES_H_ */

View File

@@ -0,0 +1,66 @@
//
// Created by xinyang on 19-3-27.
//
#include <log.h>
#include <armor_finder/armor_finder.h>
// Construct in STANDBY state and pre-create the KCF tracker.
// desc_npca = 1 / desc_pca = 0 selects raw (non-PCA-compressed) features;
// presumably the grayscale descriptor — confirm against the cv::TrackerKCF docs.
ArmorFinder::ArmorFinder(EnemyColor color, Uart &u) :
uart(u),
enemy_color(color),
state(STANDBY_STATE)
{
auto para = TrackerToUse::Params();
para.desc_npca = 1;
para.desc_pca = 0;
tracker = TrackerToUse::create(para);
if(!tracker){
LOGW("Tracker Not init");
}
}
// Top-level per-frame entry point: normalise the frame to grey, then run
// one step of the search/track state machine.
void ArmorFinder::run(cv::Mat &src) {
cv::Mat src_use;
if (src.type() == CV_8UC3) {
// NOTE(review): OpenCV frames are normally BGR; CV_RGB2GRAY weights the
// channels as if the input were RGB — confirm the camera's channel order.
cv::cvtColor(src, src_use, CV_RGB2GRAY);
}else{
src_use = src.clone();
}
// return stateSearchingTarget(src_use);
switch (state){
case SEARCHING_STATE:
if(stateSearchingTarget(src_use)){
// Only start tracking when the detected box lies fully inside the
// assumed 640x480 frame.
if((armor_box & cv::Rect2d(0, 0, 640, 480)) == armor_box) {
// Remember the bright-pixel count of the target region; used later
// by stateTrackingTarget as a drift check.
cv::Mat roi = src_use.clone()(armor_box);
cv::threshold(roi, roi, 200, 255, cv::THRESH_BINARY);
contour_area = cv::countNonZero(roi);
tracker->init(src_use, armor_box);
state = TRACKING_STATE;
LOGW("into track");
}
}
break;
case TRACKING_STATE:
// Fall back to searching as soon as the tracker loses the target.
if(!stateTrackingTarget(src_use)){
state = SEARCHING_STATE;
//std::cout << "into search!" << std::endl;
}
break;
case STANDBY_STATE:
default:
stateStandBy();
}
}
// Focal length expressed in pixels; presumably derived from a calibration
// measurement (0.36 m at 0.48 m over a 640 px width) — TODO confirm.
#define FOCUS_PIXAL (0.36/0.48*640)

/**
 * Convert the current armor_box centre offset from the image centre
 * (640x480 assumed) into yaw/pitch angles in degrees and send them over uart.
 * @return always true; uart transmit errors are not reported here.
 */
bool ArmorFinder::sendBoxPosition() {
    // Bug fix: the original used 3.14159265459, which is not pi
    // (pi = 3.14159265358979...).
    constexpr double kPi = 3.14159265358979323846;
    auto rect = armor_box;
    double dx = rect.x + rect.width / 2 - 320;   // horizontal offset from centre
    double dy = rect.y + rect.height / 2 - 240;  // vertical offset from centre
    double yaw = atan(dx / FOCUS_PIXAL) * 180 / kPi;
    double pitch = atan(dy / FOCUS_PIXAL) * 180 / kPi;
    uart.sendTarget(yaw, pitch, 0);
    return true;
}

View File

@@ -0,0 +1,63 @@
//
// Created by xinyang on 19-3-27.
//
#include "image_process.h"
// Split a raw Bayer frame into half-resolution blue and red planes by
// sampling the Bayer mosaic directly (no demosaicing).
// Consumes rows in PAIRS: the `++i` inside the loop advances to the next row,
// so src.rows must be even and blue/red must each be src-size / 2.
// Blue is taken from even rows at even columns, red from odd rows at odd
// columns — consistent with a BG-start Bayer layout; confirm against the
// camera's actual pattern.
static void splitBayerBG(cv::Mat &src, cv::Mat &blue, cv::Mat &red) {
uchar* data;
uchar* bayer_data[2];
for (int i = 0; i < src.rows; ++i) {
// Even row: every second pixel starting at column 0 -> blue plane.
data = src.ptr<uchar>(i);
bayer_data[0] = blue.ptr<uchar>(i / 2);
for (int j = 0; j < blue.cols; ++j, data += 2) {
bayer_data[0][j] = *data;
}
// Advance to the odd row: every second pixel starting at column 1 -> red.
data = src.ptr<uchar>(++i) + 1;
bayer_data[1] = red.ptr<uchar>(i / 2);
for (int j = 0; j < red.cols; ++j, data += 2) {
bayer_data[1][j] = *data;
}
}
}
// Produce a single-channel image emphasising the enemy colour.
// CV_8UC1 input is treated as a raw Bayer frame (split + channel subtraction,
// giving a 320x240 result); CV_8UC3 is treated as a colour frame whose
// blue/red channels are resized to 640x480.
// NOTE(review): the 8UC3 path returns the raw channel without subtracting the
// opposite one, unlike the Bayer path — confirm this asymmetry is intended.
// `split` is left untouched for any other input type.
void imageColorSplit(cv::Mat &src_input, cv::Mat &split, EnemyColor color) {
cv::Mat blue(240, 320, CV_8UC1), red(240, 320, CV_8UC1);
if(src_input.type() == CV_8UC1){
splitBayerBG(src_input, blue, red);
if(color == ENEMY_RED){
// Saturating subtraction suppresses white/neutral highlights.
split = red - blue;
}else if(color == ENEMY_BLUE){
split = blue - red;
}
}else if(src_input.type() == CV_8UC3){
std::vector<cv::Mat> channels;
cv::split(src_input, channels);
// BGR order assumed: channel 0 = blue, channel 2 = red.
resize(channels.at(0), blue, cv::Size(640, 480));
resize(channels.at(2), red, cv::Size(640, 480));
if(color == ENEMY_RED){
split = red;
}else if(color == ENEMY_BLUE){
split = blue;
}
}
}
// Morphological clean-up of the colour-split image followed by a linear
// contrast boost; operates on `src` in place.  Kernels are built once and
// cached for subsequent calls.
void imagePreProcess(cv::Mat &src) {
    static cv::Mat erode_small  = getStructuringElement(cv::MORPH_RECT, cv::Size(1, 4));
    static cv::Mat dilate_small = getStructuringElement(cv::MORPH_RECT, cv::Size(2, 4));
    static cv::Mat erode_big    = getStructuringElement(cv::MORPH_RECT, cv::Size(2, 4));
    static cv::Mat dilate_big   = getStructuringElement(cv::MORPH_RECT, cv::Size(3, 6));

    // Erode/dilate pairs: drop speckle noise, then restore and slightly grow
    // the surviving (vertical-ish) blobs.
    erode(src, src, erode_small);
    dilate(src, src, dilate_small);
    erode(src, src, erode_big);
    dilate(src, src, dilate_big);

    // Linear contrast stretch: dst = 1.5 * src + 0 (same depth, saturated).
    src.convertTo(src, -1, 1.5, 0);
}

View File

@@ -0,0 +1,14 @@
//
// Created by xinyang on 19-3-27.
//
#ifndef _IMAGE_PROCESS_H_
#define _IMAGE_PROCESS_H_
#include <opencv2/core.hpp>
#include <armor_finder/armor_finder.h>
// Extract a single-channel image emphasising the given enemy colour.
void imageColorSplit(cv::Mat &src_input, cv::Mat &split, EnemyColor color);
// In-place morphological clean-up + contrast boost of the split image.
void imagePreProcess(cv::Mat &src);
#endif /* _IMAGE_PROCESS_H_ */

View File

@@ -0,0 +1,192 @@
//
// Created by xinyang on 19-3-27.
//
#include <cmath>

#include <opencv2/highgui.hpp>

#include <armor_finder/armor_finder.h>
#include <log.h>
#include <options/options.h>
#include <show_images/show_images.h>

#include "image_process/image_process.h"
typedef std::vector<LightBlob> LightBlobs;
// Long-side / short-side ratio of a rotated rect (always >= 1).
static double lw_rate(const cv::RotatedRect &rect) {
    const double h = rect.size.height;
    const double w = rect.size.width;
    return (h > w) ? (h / w) : (w / h);
}
// A plausible light bar is elongated (aspect ratio > 1.2) and of sane area
// (strictly between 1 and 3000 px^2).
static bool isValidLightBlob(const cv::RotatedRect &rect) {
    if (lw_rate(rect) <= 1.2) {
        return false;
    }
    const double area = rect.size.width * rect.size.height;
    return area < 3000 && area > 1;
}
// Two rounds of saturating subtract-then-gain on an 8-bit image: pixels below
// 150 are crushed to 0 and the remainder stretched toward 255 (cv::Mat
// arithmetic saturates for CV_8U).  Currently only referenced from
// commented-out code in stateSearchingTarget.
static void pipelineLightBlobPreprocess(cv::Mat &src) {
src -= 150;
src *= 3.5;
src -= 150;
src *= 3.5;
}
// Threshold the split image at 80 and collect external contours whose
// min-area rects pass isValidLightBlob.  APPENDS to light_blobs without
// clearing it.
// NOTE: src_bin is a function-local static reused across calls — this
// function is not reentrant / thread-safe.
static bool findLightBlobs(const cv::Mat &src, LightBlobs &light_blobs) {
static cv::Mat src_bin;
cv::threshold(src, src_bin, 80, 255, CV_THRESH_BINARY);
std::vector<std::vector<cv::Point> > light_contours;
cv::findContours(src_bin, light_contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_NONE);
for (auto &light_contour : light_contours) {
cv::RotatedRect rect = cv::minAreaRect(light_contour);
if(isValidLightBlob(rect)){
light_blobs.emplace_back(rect);
}
}
// At least two bars are needed to form an armour plate.
return light_blobs.size() >= 2;
}
/**
 * True when the two light bars are roughly parallel (angle difference < 10
 * degrees).  minAreaRect angles need normalising: when the rect's height is
 * the long side, the bar direction is offset a further 90 degrees.
 */
bool angelJudge(const LightBlob &light_blob_i, const LightBlob &light_blob_j) {
    float angle_i = light_blob_i.rect.size.width > light_blob_i.rect.size.height ? light_blob_i.rect.angle :
                    light_blob_i.rect.angle - 90;
    float angle_j = light_blob_j.rect.size.width > light_blob_j.rect.size.height ? light_blob_j.rect.angle :
                    light_blob_j.rect.angle - 90;
    // Fix: unqualified abs() can resolve to the C int overload and truncate
    // the fractional angle difference; std::fabs keeps float precision.
    return std::fabs(angle_i - angle_j) < 10;
}
/**
 * True when the two bars' centres are vertically within 30 px of each other
 * (armour light bars sit at roughly the same height).
 */
bool heightJudge(const LightBlob &light_blob_i, const LightBlob &light_blob_j) {
    cv::Point2f centers = light_blob_i.rect.center - light_blob_j.rect.center;
    // Fix: unqualified abs() can resolve to the C int overload and truncate
    // the fractional offset; std::fabs keeps float precision.
    return std::fabs(centers.y) < 30;
}
bool lengthJudge(const LightBlob &light_blob_i, const LightBlob &light_blob_j) {
double side_length;
cv::Point2f centers = light_blob_i.rect.center - light_blob_j.rect.center;
side_length = sqrt(centers.ddot(centers));
// std::cout << "side:" << side_length << " length:" << light_blob_i.length << std::endl;
return (side_length / light_blob_i.length < 6 && side_length / light_blob_i.length > 0.5);
}
// The two bars must be of comparable length (ratio strictly inside (0.5, 2)).
bool lengthRatioJudge(const LightBlob &light_blob_i, const LightBlob &light_blob_j) {
    double ratio = light_blob_i.length / light_blob_j.length;
    return ratio < 2 && ratio > 0.5;
}
/**
 * Decide whether two light bars can form an armour plate: all four geometric
 * filters (comparable length, plausible gap, similar height, near-parallel)
 * must pass, evaluated in the same short-circuit order as before.
 * Fix: removed an unreachable duplicate of these checks that followed the
 * original's `return true;`.
 */
bool isCoupleLight(const LightBlob &light_blob_i, const LightBlob &light_blob_j) {
    return lengthRatioJudge(light_blob_i, light_blob_j) &&
           lengthJudge(light_blob_i, light_blob_j) &&
           heightJudge(light_blob_i, light_blob_j) &&
           angelJudge(light_blob_i, light_blob_j);
}
/**
 * Squared distance from the centre of `box` to the image centre (320, 240);
 * used to prefer the armour closest to the crosshair.
 * Bug fix: a box's centre is x + width/2 / y + height/2 — the original
 * subtracted, measuring a point left of and above the box (compare
 * ArmorFinder::sendBoxPosition, which correctly adds).
 */
double centerDistance(cv::Rect2d box) {
    double dx = box.x + box.width / 2 - 320;
    double dy = box.y + box.height / 2 - 240;
    return dx * dx + dy * dy;
}
/**
 * Pair up light bars into candidate armour boxes (padded bounding box of each
 * accepted pair) and sort them by distance to the image centre.
 * @return true when at least one fully-in-frame box was produced.
 */
static bool findArmorBoxes(LightBlobs &light_blobs, std::vector<cv::Rect2d> &armor_boxes) {
    // Guard: with the original `i < light_blobs.size() - 1`, an empty vector
    // underflowed the unsigned size and the loop indexed out of range.
    const int n = static_cast<int>(light_blobs.size());
    if (n < 2) {
        return false;
    }
    for (int i = 0; i < n - 1; ++i) {
        for (int j = i + 1; j < n; ++j) {
            if (!isCoupleLight(light_blobs.at(i), light_blobs.at(j))) {
                continue;
            }
            cv::Rect2d rect_left = light_blobs.at(i).rect.boundingRect();
            cv::Rect2d rect_right = light_blobs.at(j).rect.boundingRect();
            // Union of the two bar boxes, padded by 5 px on every side.
            double min_x = fmin(rect_left.x, rect_right.x) - 5;
            double max_x = fmax(rect_left.x + rect_left.width, rect_right.x + rect_right.width) + 5;
            double min_y = fmin(rect_left.y, rect_right.y) - 5;
            double max_y = fmax(rect_left.y + rect_left.height, rect_right.y + rect_right.height) + 5;
            // Reject boxes that touch the (assumed 640x480) frame border.
            if (min_x < 0 || max_x > 640 || min_y < 0 || max_y > 480) {
                continue;
            }
            armor_boxes.emplace_back(min_x, min_y, max_x - min_x, max_y - min_y);
        }
    }
    if (armor_boxes.empty()) {
        return false;
    }
    // Closest-to-centre candidate first.
    sort(armor_boxes.begin(), armor_boxes.end(), [](cv::Rect2d box1, cv::Rect2d box2) -> bool {
        return centerDistance(box1) < centerDistance(box2);
    });
    return true;
}
bool judge_light_color(std::vector<LightBlob> &light, std::vector<LightBlob> &color, std::vector<LightBlob> &result) {
for (auto &i:color) {
for (auto &j:light) {
cv::Rect2d a = i.rect.boundingRect2f();
cv::Rect2d b = j.rect.boundingRect2f();
cv::Rect2d ab = a & b;
if (ab.area() / fmin(a.area(), b.area()) >= 0.2) {
result.emplace_back(j);
break;
}
}
}
return !result.empty();
}
// One searching-state iteration: colour-split + clean the frame, find light
// bars, pair them into armour boxes, pick the one nearest the image centre,
// and send its position over uart.
// Returns false (stay searching) as soon as any stage finds nothing.
bool ArmorFinder::stateSearchingTarget(cv::Mat &src) {
cv::Mat split, pmsrc=src.clone();
LightBlobs light_blobs, pm_light_blobs, light_blobs_real;
std::vector<cv::Rect2d> armor_boxes;
// cv::resize(src, pmsrc, cv::Size(320, 240));
imageColorSplit(src, split, enemy_color);
imagePreProcess(split);
// Normalise to the 640x480 working resolution (Bayer path yields 320x240).
cv::resize(split, split, cv::Size(640, 480));
// pipelineLightBlobPreprocess(pmsrc);
// if(!findLightBlobs(pmsrc, pm_light_blobs)){
// return false;
// }
if(!findLightBlobs(split, light_blobs)){
return false;
}
// if(!judge_light_color(light_blobs, pm_light_blobs, light_blobs_real)){
// return false;
// }
if(show_light_blobs){
showContours("blobs", split, light_blobs);
// showContours("pm blobs", pmsrc, pm_light_blobs);
// showContours("blobs real", src, light_blobs_real);
cv::waitKey(1);
}
if(!findArmorBoxes(light_blobs, armor_boxes)){
return false;
}
// Boxes are sorted by distance to centre; take the closest.
armor_box = armor_boxes[0];
if(show_armor_boxes){
showArmorBoxVector("boxes", split, armor_boxes);
cv::waitKey(1);
}
// NOTE(review): split was resized to 640x480 above, so this half-resolution
// rescale branch appears unreachable — confirm whether it is leftover from
// the commented-out 320x240 pipeline.
if(split.size() == cv::Size(320, 240)){
armor_box.x *= 2;
armor_box.y *= 2;
armor_box.width *= 2;
armor_box.height *= 2;
}
return sendBoxPosition();
}

View File

@@ -0,0 +1,11 @@
//
// Created by xinyang on 19-3-27.
//
#include <armor_finder/armor_finder.h>
// Standby simply hands control to the searching state on the next frame.
bool ArmorFinder::stateStandBy() {
state = SEARCHING_STATE;
return true;
}

View File

@@ -0,0 +1,21 @@
//
// Created by xinyang on 19-3-27.
//
#include <log.h>
#include <armor_finder/armor_finder.h>
/**
 * One tracking-state iteration: advance the KCF tracker, then sanity-check
 * the tracked box — it must lie fully inside the assumed 640x480 frame and
 * its bright-pixel count must stay within 30% of the value recorded when
 * tracking started (see ArmorFinder::run).
 * Fix: removed the unused local `last` (copy of armor_box that was never read).
 * @return false to drop back to searching; true after sending the target.
 */
bool ArmorFinder::stateTrackingTarget(cv::Mat &src) {
    tracker->update(src, armor_box);
    if ((armor_box & cv::Rect2d(0, 0, 640, 480)) != armor_box) {
        return false;
    }
    // NOTE: thresholds the ROI in place, i.e. this modifies `src`.
    cv::Mat roi = src(armor_box);
    threshold(roi, roi, 200, 255, cv::THRESH_BINARY);
    if (abs(cv::countNonZero(roi) - contour_area) > contour_area * 0.3) {
        return false;
    }
    return sendBoxPosition();
}

View File

@@ -0,0 +1,52 @@
#include <show_images/show_images.h>
using namespace cv;
/**
 * Draw every candidate armour box (green, 1 px) on a copy of `src` and show it
 * in the named window.  Accepts grey (CV_8UC1) or 3-channel (CV_8UC3) input.
 * Fix: unsupported types now return early — the original kept a static Mat,
 * so an unexpected type displayed a stale frame from a previous call.
 */
void showArmorBoxVector(std::string windows_name, const cv::Mat &src, const std::vector<cv::Rect2d> &armor_box) {
    Mat image2show;
    if (src.type() == CV_8UC1) {
        cvtColor(src, image2show, COLOR_GRAY2RGB);
    } else if (src.type() == CV_8UC3) {
        image2show = src.clone();
    } else {
        return;  // nothing sensible to display
    }
    for (const auto &box : armor_box) {
        rectangle(image2show, box, Scalar(0, 255, 0), 1);
    }
    imshow(windows_name, image2show);
}
/**
 * Draw a single armour box (green, 1 px) on a copy of `src` and show it in
 * the named window.  Accepts grey (CV_8UC1) or 3-channel (CV_8UC3) input.
 * Fix: unsupported types now return early instead of displaying the stale
 * contents of the original static Mat.
 */
void showArmorBox(std::string windows_name, const cv::Mat &src, cv::Rect2d armor_box) {
    Mat image2show;
    if (src.type() == CV_8UC1) {
        cvtColor(src, image2show, COLOR_GRAY2RGB);
    } else if (src.type() == CV_8UC3) {
        image2show = src.clone();
    } else {
        return;  // nothing sensible to display
    }
    rectangle(image2show, armor_box, Scalar(0, 255, 0), 1);
    imshow(windows_name, image2show);
}
/**
 * Draw the bounding box of every detected light bar (blue, 3 px) on a copy of
 * `src` and show it in the named window.  Accepts CV_8UC1 or CV_8UC3 input.
 * Fix: unsupported types now return early instead of displaying the stale
 * contents of the original static Mat.
 */
void showContours(std::string windows_name, const cv::Mat &src, const std::vector<LightBlob> &light_blobs) {
    Mat image2show;
    if (src.type() == CV_8UC1) {
        cvtColor(src, image2show, COLOR_GRAY2RGB);
    } else if (src.type() == CV_8UC3) {
        image2show = src.clone();
    } else {
        return;  // nothing sensible to display
    }
    for (const auto &light_blob : light_blobs) {
        rectangle(image2show, light_blob.rect.boundingRect(), Scalar(255, 0, 0), 3);
    }
    imshow(windows_name, image2show);
}

View File

@@ -0,0 +1,43 @@
//
// Created by xixiliadorabarry on 1/24/19.
//
#ifndef CONSTANT_H
#define CONSTANT_H
// Degrees-to-radians conversion factor.
#define d2r (CV_PI / 180.0)
// Ally colour identifiers (arbitrary sentinels).
const int ALLY_BLUE = 123;
const int ALLY_RED = 456;
// Native camera resolution vs. the half-resolution working size.
const int SRC_WIDTH_CAMERA = 640;
const int SRC_HEIGHT_CAMERA = 480;
const int SRC_WIDTH = 320;
const int SRC_HEIGHT = 240;
const double PI = 3.1415926;
// Energy-mechanism rotation directions.
const int CLOCKWISE = 1;
const int ANTICLOCKWISE = -1;
const float ATTACK_DISTANCE = 770;//cm
const double WHOLE_FAN = 80;//cm
//const double ARMOR_CENTER_TO_CYCLE_CENTER = 75;//cm
const double ARMOR_CENTER_TO_CYCLE_CENTER = 71;//cm
// Region of interest extracted from the frame for energy detection.
const int EXTRACT_POINT_X = 200;
const int EXTRACT_POINT_Y = 20;
const int EXTRACT_WIDTH = 240;
const int EXTRACT_HEIGHT = 180;
// Axes below take the camera's forward direction as the y axis.
const int GM_L = 14;// gimbal-to-camera offset along z
const int GM_X = 15;// gimbal-to-camera offset along x
const int GM_H = 16;// gimbal-to-camera offset along y
//const double STRETCH = 231.0/640.0;// real-world distance to image scale ratio
const double STRETCH = 231.0/640.0;
// Image coordinates of the gimbal zero point.
const int ZERO_POINT_X = 281;
const int ZERO_POINT_Y = 188;
// Mechanical zero offsets of the gimbal, in radians.
const double YAW_ORIGIN_RAD = PI/180*2.25;
const double PITCH_ORIGIN_RAD = PI/180*14.85;
const double LIFT_HEIGHT = 20;// gimbal lift height
#endif //CONSTANT_H

View File

@@ -0,0 +1,129 @@
//
// Created by xixiliadorabarry on 1/24/19.
//
#ifndef ENERGY_H
#define ENERGY_H
#include <iostream>
#include <vector>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <stdio.h>
#include <time.h>
#include <sys/timeb.h>
#include "energy/constant.h"
#include "energy/param_struct_define.h"
#include "uart/uart.h"
using std::vector;
// Detector/aimer for the rotating "energy mechanism": finds the fan blades
// and armour pads, fits the rotation circle, predicts the hit point and sends
// gimbal angles over uart.
class Energy {
public:
Energy(Uart &u);
~Energy();
// Process one frame; return value conventions defined in the implementation.
int run(cv::Mat &src);
cv::Point2f uart_hit_point;
// Start time used for the timed sampling in cycleDefaultCalculateConst.
clock_t start;
Uart &uart;
// color: ALLY_RED / ALLY_BLUE (see constant.h).
void setAllyColor(int color);
// rotation: CLOCKWISE / ANTICLOCKWISE (see constant.h).
void setRotation(int rotation);
void extract(cv::Mat &src);
void sendTargetByUart(float x, float y, float z);
private:
EnergyPartParam energy_part_param_;
// Current / previous detection counts.
int fans_cnt;
int armors_cnt;
int count;
int last_fans_cnt;
int last_armors_cnt;
// Fitted rotation circle radius and angular positions of the target.
double radius;
double target_position;
double last_target_position;
float target_armor;
int ally_color_;
int energy_part_rotation;
float attack_distance;
int send_cnt;
double rectified_focal_length;
double theta;// angle the pitch motor should rotate
double phi;// angle the yaw motor should rotate
float yaw_rotation;
float pitch_rotation;
// -1 until the corresponding extreme point of the circle has been seen.
int isLeftVertexFound, isTopVertexFound, isRightVertexFound, isBottomVertexFound;
std::vector<EnergyPart> fans;
std::vector<EnergyPart> armors;
std::vector<EnergyPart> gimble_zero_points;
// Fitted circle centre and current/previous/predicted target points.
cv::Point cycle_center;
cv::Point target_center;
cv::Point last_target_center;
cv::Point hit_point;
std::vector<float>fanPosition;
std::vector<float>armorPosition;
// Armour-centre histories used by the circle-fitting routines.
std::vector<cv::Point> Armor_center;
std::vector<cv::Point> first_armor_centers;
std::vector<cv::Point> all_armor_centers;
// Extreme points of the rotation circle found so far.
cv::Point left, right, top, bottom;
// Per-channel working buffers.
cv::Mat src_blue, src_red, src_green;
void initEnergyPartParam();
// Contour detection for fan blades / armour pads / the gimbal zero marker.
int findFan(const cv::Mat &src, vector<EnergyPart> &fans, int &last_fans_cnt);
int findArmor(const cv::Mat &src, vector<EnergyPart> &armors, int &last_armors_cnt);
int findGimbleZeroPoint(const cv::Mat &src, vector<EnergyPart> &gimble_zero_point);
// Debug visualisation helpers.
void showFanContours(std::string windows_name, const cv::Mat &src, const std::vector<EnergyPart> &fans);
void showArmorContours(std::string windows_name, const cv::Mat &src, const std::vector<EnergyPart> &armors);
void showBothContours(std::string windows_name, const cv::Mat &src, const std::vector<EnergyPart> &fans,
const std::vector<EnergyPart> &armors);
bool isValidFanContour(const vector<cv::Point> &fan_contour);
bool isValidArmorContour(const vector<cv::Point> &armor_contour);
// Angular-position helpers relative to the fitted circle.
void getFanPosition(std::vector<float> &fanPosition, const std::vector<EnergyPart> &fans, cv::Point cycle_center, double radius);
void getArmorPosition(std::vector<float> &armorPosition, const std::vector<EnergyPart> &armors, cv::Point cycle_center, double radius);
void getFirstArmorCenters(vector<EnergyPart> &armors, std::vector<cv::Point> &first_armor_centers);
void getAllArmorCenters();
void getPosition(cv::Point point, double &angle);
// Alternative circle-fitting strategies (three-point, extreme-point, least squares).
void cycleQuickCalculate(std::vector<cv::Point> &first_armor_centers, cv::Point &cycle_center, double &radius);
void cycleDefaultCalculateConst(cv::Point &cycle_center, double &radius);
void cycleCalculate();
void cycleLeastFit();
void findTarget(const std::vector<float>fanPosition, const std::vector<float>armorPosition, float &target_armor);
void findWholeCycle(const std::vector<cv::Point>&first_armor_centers);
// Geometry utilities.
void saveFourPoints(std::vector<cv::Point> &FourPoints, cv::Point point_1, cv::Point point_2, cv::Point point_3, cv::Point point_4);
void savePoint2f(std::vector<cv::Point2f> &point_save, cv::Point point);
double pointDistance(cv::Point point_1, cv::Point point_2);
void rotate(double rad, double radius, cv::Point center, cv::Point point_old, cv::Point &point_new);
void stretch(cv::Point point_1, cv::Point2f &point_2);
void cycle(cv::Point p1, cv::Point p2, cv::Point p3, cv::Point &center, double &radius);
void getHitPoint();
bool changeTarget();
void gimbleRotation();
// Image pre-processing (Bayer split, channel subtraction, morphology).
void splitBayerBG(cv::Mat &src, cv::Mat &blue, cv::Mat &red);
void imagePreprocess(cv::Mat &src);
void StructingElementClose(cv::Mat &src);
void StructingElementErodeDilate(cv::Mat &src);
};
#endif //ENERGY_H

View File

@@ -0,0 +1,60 @@
//
// Created by xixiliadorabarry on 1/24/19.
//
#ifndef PARAM_STRUCT_DEFINE_H
#define PARAM_STRUCT_DEFINE_H
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <vector>
using std::vector;
// One detected energy-mechanism part (fan blade or armour pad): the raw
// contour plus its fitted min-area rotated rect and that rect's angle.
struct EnergyPart {
    cv::RotatedRect rect;
    float angle;
    vector<cv::Point> contour;

    explicit EnergyPart(vector<cv::Point> &c) : contour(c) {
        // Fit once and reuse — the original called cv::minAreaRect twice.
        rect = cv::minAreaRect(c);
        angle = rect.angle;
    }
};
// Tunable detection parameters for the energy mechanism, filled in by
// Energy::initEnergyPartParam().
struct EnergyPartParam {
// Mechanism rotation speed and lead time used for hit prediction.
double RPM;
double HIT_TIME;
// Binarisation thresholds for the various detection passes.
int GRAY_THRESH;
int SPLIT_GRAY_THRESH;
int FAN_GRAY_THRESH;
int ARMOR_GRAY_THRESH;
// Fan-blade contour acceptance limits (area, side lengths, aspect ratio).
long FAN_CONTOUR_AREA_MAX;
long FAN_CONTOUR_AREA_MIN;
long FAN_CONTOUR_LENGTH_MIN;
long FAN_CONTOUR_WIDTH_MIN;
float FAN_CONTOUR_HW_RATIO_MAX;
float FAN_CONTOUR_HW_RATIO_MIN;
// Armour-pad contour acceptance limits (area, side lengths, aspect ratio).
long ARMOR_CONTOUR_AREA_MAX;
long ARMOR_CONTOUR_AREA_MIN;
long ARMOR_CONTOUR_LENGTH_MIN;
long ARMOR_CONTOUR_WIDTH_MIN;
long ARMOR_CONTOUR_LENGTH_MAX;
long ARMOR_CONTOUR_WIDTH_MAX;
float ARMOR_CONTOUR_HW_RATIO_MAX;
float ARMOR_CONTOUR_HW_RATIO_MIN;
// Max angle difference when pairing twin contours.
float TWIN_ANGEL_MAX;
};
// Gimbal attitude (yaw/pitch) shared with the aiming code; `aim` is defined
// in one translation unit elsewhere.  The C-style typedef was redundant in
// C++, so a plain struct declaration is used — usage is unchanged.
struct GMAngle_t {
    float yaw;
    float pitch;
};
extern GMAngle_t aim;
#endif //PARAM_STRUCT_DEFINE_H

View File

@@ -0,0 +1,106 @@
//
// Created by xixiliadorabarry on 1/24/19.
//
#include "energy/energy.h"
using namespace cv;
using std::cout;
using std::endl;
using std::vector;
// Estimate the rotation circle from three samples of the armour-centre
// history (first, last, middle) using the three-point circumcircle helper
// cycle().  No-op when fewer than three samples are available.
void Energy::cycleQuickCalculate(std::vector<cv::Point> &first_armor_centers, cv::Point &cycle_center, double &radius) {
//if (isCalibrated()) return;
int cur_size = static_cast<int>(first_armor_centers.size());
if (cur_size < 3)return;
cv::Point point_1, point_2, point_3;
point_1 = first_armor_centers.at(0);
point_2 = first_armor_centers.at(static_cast<unsigned long>(cur_size - 1));
point_3 = first_armor_centers.at(static_cast<unsigned long>(cur_size / 2));
//cout << point_1 << '\t' << point_2 << endl;
//cout << first_armor_centers.at(point_1) << '\t' << first_armor_centers.at(point_2) << endl;
cycle(point_1, point_2, point_3, cycle_center, radius);
cout << "The cycle center is: " << cycle_center << endl;
cout << "The radius is: " << radius << endl;
}
// Collect up to three armour-centre samples, roughly one per second since
// `start`, then fit the circle from them; runs at most 5 times (count guard).
// NOTE(review): the divisor 1000000.00 assumes CLOCKS_PER_SEC == 1e6 (true on
// POSIX, not guaranteed elsewhere) — confirm for the target platform.
void Energy::cycleDefaultCalculateConst(cv::Point &cycle_center, double &radius) {
if (count >= 5)
return;
clock_t end;
double time_duration = 1.0, dt;
if (Armor_center.size() < 3) {
end = clock();
dt = (end - start) / 1000000.00;
// Take the next sample only after another full interval has elapsed.
if (dt >= time_duration * count) {
getFirstArmorCenters(armors, Armor_center);
count++;
}
}
else {
cycleQuickCalculate(Armor_center, cycle_center, radius);
count++;
}
}
// Derive the circle from the four extreme points gathered by findWholeCycle:
// centre x from top/bottom, centre y from left/right, radius from the
// horizontal extent.  Integer division — results are truncated to pixels.
void Energy::cycleCalculate(){
cycle_center.x = (top.x + bottom.x)/2;
cycle_center.y = (left.y + right.y)/2;
radius = (right.x - left.x)/2;
cout << "The cycle center is: " << cycle_center << endl;
cout << "The radius is: " << radius << endl;
}
// Least-squares circle fit over all recorded armour centres — appears to be
// the algebraic (Kasa-style) fit: solve for a, b, c in
// x^2 + y^2 + a*x + b*y + c = 0, then centre = (-a/2, -b/2) and
// radius = sqrt(a^2 + b^2 - 4c) / 2.  Needs at least 3 points.
void Energy::cycleLeastFit()
{
cycle_center.x = 0;
cycle_center.y = 0;
radius = 0.0f;
if (all_armor_centers.size() < 3)
{
cout<<"Cannot calculate a circle"<<endl;
return;
}
// Accumulate the moment sums required by the normal equations.
double sum_x = 0.0f, sum_y = 0.0f;
double sum_x2 = 0.0f, sum_y2 = 0.0f;
double sum_x3 = 0.0f, sum_y3 = 0.0f;
double sum_xy = 0.0f, sum_x1y2 = 0.0f, sum_x2y1 = 0.0f;
int N = static_cast<int>(all_armor_centers.size());
for (int i = 0; i < N; i++)
{
double x = all_armor_centers.at(i).x;
double y = all_armor_centers.at(i).y;
double x2 = x * x;
double y2 = y * y;
sum_x += x;
sum_y += y;
sum_x2 += x2;
sum_y2 += y2;
sum_x3 += x2 * x;
sum_y3 += y2 * y;
sum_xy += x * y;
sum_x1y2 += x * y2;
sum_x2y1 += x2 * y;
}
// Solve the 2x2 linear system for a and b, then back-substitute for c.
double C, D, E, G, H;
double a, b, c;
C = N * sum_x2 - sum_x * sum_x;
D = N * sum_xy - sum_x * sum_y;
E = N * sum_x3 + N * sum_x1y2 - (sum_x2 + sum_y2) * sum_x;
G = N * sum_y2 - sum_y * sum_y;
H = N * sum_x2y1 + N * sum_y3 - (sum_x2 + sum_y2) * sum_y;
a = (H * D - E * G) / (C * G - D * D);
b = (H * C - E * D) / (D * D - G * C);
c = -(a * sum_x + b * sum_y + sum_x2 + sum_y2) / N;
cycle_center.x = static_cast<int>(a / (-2));
cycle_center.y = static_cast<int>(b / (-2));
radius = sqrt(a * a + b * b - 4 * c) / 2;
cout << "The cycle center is: " << cycle_center << endl;
cout << "The radius is: " << radius << endl;
}

View File

@@ -0,0 +1,59 @@
//
// Created by xixiliadorabarry on 1/24/19.
//
#include "energy/energy.h"
using namespace cv;
using std::cout;
using std::endl;
using std::vector;
// Split a raw Bayer frame into half-resolution blue and red planes.
// Identical logic to the free function splitBayerBG in image_process.cpp —
// duplicated code; consider sharing one implementation.
// Consumes rows in PAIRS (`++i` inside the loop): src.rows must be even and
// blue/red must each be src-size / 2.  Blue from even rows/even columns, red
// from odd rows/odd columns (BG-start Bayer layout assumed — confirm).
void Energy::splitBayerBG(cv::Mat &src, cv::Mat &blue, cv::Mat &red) {
uchar* data;
uchar* bayer_data[2];
for (int i = 0; i < src.rows; ++i) {
// Even row -> blue samples.
data = src.ptr<uchar>(i);
bayer_data[0] = blue.ptr<uchar>(i / 2);
for (int j = 0; j < blue.cols; ++j, data += 2) {
bayer_data[0][j] = *data;
}
// Advance to the odd row -> red samples (offset by one column).
data = src.ptr<uchar>(++i) + 1;
bayer_data[1] = red.ptr<uchar>(i / 2);
for (int j = 0; j < red.cols; ++j, data += 2) {
bayer_data[1][j] = *data;
}
}
}
// Reduce the frame to a binary image of the ALLY-coloured regions: channel
// split (Bayer or BGR), subtract the opposite channel, upscale to 640x480 and
// threshold.  Replaces `src` in place.
void Energy::imagePreprocess(cv::Mat &src) {
if(src.type() == CV_8UC1)
{
// Raw Bayer path: half-resolution blue/red planes.
splitBayerBG(src, src_blue, src_red);
if(ally_color_ == ALLY_RED)
{
src = src_red - src_blue;
}else if(ally_color_ == ALLY_BLUE){
src = src_blue - src_red;
}
}
else if(src.type() == CV_8UC3)
{
// BGR path: channel 0 = blue, 1 = green (unused below), 2 = red.
std::vector<Mat> channels;
split(src, channels);
resize(channels.at(0), src_blue, Size(SRC_WIDTH, SRC_HEIGHT));
resize(channels.at(1), src_green, Size(SRC_WIDTH, SRC_HEIGHT));
resize(channels.at(2), src_red, Size(SRC_WIDTH, SRC_HEIGHT));
if(ally_color_ == ALLY_RED)
{
src = src_red-src_blue;
//src=src_red;
}else if(ally_color_ == ALLY_BLUE){
src = src_blue-src_red;
//src=src_blue;
}
}
// The trailing `2` is the fx scale factor; per cv::resize it is ignored
// because an explicit dsize is given.
cv::resize(src, src, cv::Size(640, 480), 2);
threshold(src, src, energy_part_param_.SPLIT_GRAY_THRESH, 255, THRESH_BINARY);
}

View File

@@ -0,0 +1,26 @@
//
// Created by xixiliadorabarry on 1/24/19.
//
#include "energy/energy.h"
using namespace cv;
using std::cout;
using std::endl;
using std::vector;
// Morphological closing (dilate then erode) with a 4x4 rectangular kernel,
// in place; no-op on an empty image.
void Energy::StructingElementClose(cv::Mat &src){
    if (src.empty()) {
        return;
    }
    Mat kernel = getStructuringElement(MORPH_RECT, Size(4, 4));
    morphologyEx(src, src, MORPH_CLOSE, kernel);
}
// Debug-only visualisation of an erode/dilate pass: the results go to local
// buffers and two imshow windows — `src` itself is NEVER modified.
// NOTE(review): if callers expect the filtered image back in `src`, that
// write-back is missing; confirm this is intended as visualisation only.
void Energy::StructingElementErodeDilate(cv::Mat &src) {
cv::Mat src_out, src_out_out;
Mat element_erode = getStructuringElement(MORPH_RECT, Size(4, 4));
Mat element_dilate = getStructuringElement(MORPH_RECT, Size(20, 20));
erode(src,src_out, element_erode);
imshow("erode", src_out);
dilate(src_out, src_out_out, element_dilate);
imshow("dilate", src_out_out);
}

View File

@@ -0,0 +1,64 @@
//
// Created by xixiliadorabarry on 1/24/19.
//
#include "energy/energy.h"
using namespace cv;
using std::cout;
using std::endl;
using std::vector;
// Construct with defaults: red ally, clockwise mechanism, everything else
// zeroed/reset; detection thresholds come from initEnergyPartParam().
Energy::Energy(Uart &u):uart(u),
src_blue(SRC_HEIGHT, SRC_WIDTH, CV_8UC1),
src_red(SRC_HEIGHT, SRC_WIDTH, CV_8UC1)
{
// Detection counters.
fans_cnt = 0;
armors_cnt = 0;
// Circle/target geometry, reset to "unknown".
cycle_center = Point(0, 0);
target_center = Point(0, 0);
last_target_center = Point(0, 0);
hit_point = Point(0,0);
target_position = -1;
last_target_position = -1;
target_armor = -1;
radius = 0;
// Mode defaults.
ally_color_ = ALLY_RED;
energy_part_rotation = CLOCKWISE;
attack_distance = ATTACK_DISTANCE;
count = 1;
last_fans_cnt = 0;
last_armors_cnt = 0;
send_cnt = 0;
rectified_focal_length = 1000;
// Gimbal angles.
theta = 0;
phi = 0;
yaw_rotation = 0;
pitch_rotation = 0;
// Vertex flags: -1 = not yet found (see findWholeCycle).
isLeftVertexFound = -1;
isTopVertexFound = -1;
isRightVertexFound = -1;
isBottomVertexFound = -1;
// Extreme points initialised to the opposite frame corners so any real
// observation replaces them.
left = Point(640, 480);
right = Point(0, 0);
top = Point(640, 480);
bottom = Point(0, 0);
initEnergyPartParam();
}
Energy::~Energy() = default;
// color: ALLY_RED or ALLY_BLUE (see constant.h).
void Energy::setAllyColor(int color)
{
ally_color_ = color;
}
// rotation: CLOCKWISE or ANTICLOCKWISE (see constant.h).
void Energy::setRotation(int rotation){
energy_part_rotation = rotation;
}

View File

@@ -0,0 +1,89 @@
//
// Created by xixiliadorabarry on 19-3-23.
//
#include "energy/energy.h"
#include <iostream>
using namespace cv;
using std::cout;
using std::endl;
using std::vector;
// Scan the armour-centre history for local extrema in x and y to locate the
// four extreme points (left/right/top/bottom) of the rotation circle, later
// consumed by cycleCalculate().  Each vertex is recorded once (is*VertexFound
// flips from -1 to 1) and then refined toward the more extreme observation.
// Stops once all four vertices have been found.
void Energy::findWholeCycle(const std::vector<cv::Point>&first_armor_centers) {
int cur_size = static_cast<int>(first_armor_centers.size());
//cout << "first armor centers' size: " << first_armor_centers.size() << endl;
if (cur_size == 0)return;
int vertex = 0;
for (int i = 1; i < cur_size - 1; ++i)
{
if (vertex == 4)break;
// Local maximum in x -> candidate right vertex.
if (first_armor_centers.at(i).x >= first_armor_centers.at(i - 1).x && first_armor_centers.at(i).x >= first_armor_centers.at(i + 1).x)
{
if (isRightVertexFound == -1) {
vertex += 1;
isRightVertexFound = 1;
right = first_armor_centers.at(i);
cout << "right vertex: " << right << endl;
continue;
}
else if (right.x > first_armor_centers.at(i).x)continue;
else {
right = first_armor_centers.at(i);
continue;
}
}
// Local minimum in x -> candidate left vertex.
if (first_armor_centers.at(i).x <= first_armor_centers.at(i - 1).x && first_armor_centers.at(i).x <= first_armor_centers.at(i + 1).x)
{
if (isLeftVertexFound == -1) {
vertex += 1;
isLeftVertexFound = 1;
left = first_armor_centers.at(i);
cout << "left vertex: " << left << endl;
continue;
}
else if (left.x < first_armor_centers.at(i).x)continue;
else {
left = first_armor_centers.at(i);
continue;
}
}
// Local minimum in y (image y grows downward) -> candidate top vertex.
if (first_armor_centers.at(i).y <= first_armor_centers.at(i - 1).y && first_armor_centers.at(i).y <= first_armor_centers.at(i + 1).y)
{
if (isTopVertexFound == -1) {
vertex += 1;
isTopVertexFound = 1;
top = first_armor_centers.at(i);
cout << "top vertex: " << top << endl;
continue;
}
else if (top.y < first_armor_centers.at(i).y)continue;
else {
top = first_armor_centers.at(i);
continue;
}
}
// Local maximum in y -> candidate bottom vertex.
if (first_armor_centers.at(i).y >= first_armor_centers.at(i - 1).y && first_armor_centers.at(i).y >= first_armor_centers.at(i + 1).y)
{
if (isBottomVertexFound == -1) {
vertex += 1;
isBottomVertexFound = 1;
bottom = first_armor_centers.at(i);
cout << "bottom vertex: " << bottom << endl;
continue;
}
else if (bottom.y > first_armor_centers.at(i).y)continue;
else {
bottom = first_armor_centers.at(i);
continue;
}
}
}
/*for (int k = 0; k < first_armor_centers.size(); ++k) {
cout << k << " : " << first_armor_centers.at(k) << '\t';
float angle = static_cast<float>(180 / PI * atan2(-1 * (first_armor_centers.at(k).y - 298), (first_armor_centers.at(k).x - 298)));
cout << angle << endl;
}*/
}

View File

@@ -0,0 +1,222 @@
//
// Created by xixiliadorabarry on 1/24/19.
//
#include "energy/energy.h"
using namespace cv;
using std::cout;
using std::endl;
using std::vector;
// Detect fan-blade contours: close the binary image, take external contours
// and append those passing isValidFanContour to `fans`.
// Returns -1 when the detection count DROPPED since the previous call
// (a blade was hit/changed), otherwise the current count; 0 on empty input.
// NOTE: src_bin is a function-local static — not reentrant; and
// `fans.size() < last_fans_cnt` compares unsigned against int (safe only
// because last_fans_cnt is never negative here).
int Energy::findFan(const cv::Mat &src, vector<EnergyPart> &fans, int &last_fans_cnt) {
if (src.empty())return 0;
static Mat src_bin;
src_bin = src.clone();
// threshold(src, src_bin, energy_part_param_.FAN_GRAY_THRESH, 255, THRESH_BINARY);
if(src.type() == CV_8UC3){
cvtColor(src_bin, src_bin, CV_BGR2GRAY);
}
std::vector<vector<Point> > fan_contours;
StructingElementClose(src_bin);
// imshow("fan struct",src_bin);
findContours(src_bin, fan_contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_NONE);
for (auto &fan_contour : fan_contours) {
if (!isValidFanContour(fan_contour)) {
continue;
}
// double cur_contour_area = contourArea(fan_contour);
// RotatedRect cur_rect = minAreaRect(fan_contour);
// Size2f cur_size = cur_rect.size;
//
// cout<<"cur_contour_area: "<<cur_contour_area<<'\t'<<"rect_area: "<<cur_size.area()<<'\t'<<"ratio: "<<cur_contour_area/cur_size.area()<<endl;
// float length = cur_size.height > cur_size.width ? cur_size.height : cur_size.width;
// float width = cur_size.height < cur_size.width ? cur_size.height : cur_size.width;
// if(length>5&&width>5){
// cout<<cur_rect.center;
// fans.emplace_back(fan_contour);
// cout<<"fan area: "<<length<<'\t'<<width<<endl;
// }
fans.emplace_back(fan_contour);
// cout<<"fan area: "<<length<<'\t'<<width<<endl;
}
// A drop in count signals a state change on the mechanism.
if(fans.size() < last_fans_cnt){
last_fans_cnt = static_cast<int>(fans.size());
return -1;
}
last_fans_cnt = static_cast<int>(fans.size());
return static_cast<int>(fans.size());
}
// Detect armour-pad contours.  Finds ALL contours plus EXTERNAL-only
// contours, then removes the external ones from the full list so only inner
// contours remain (drops the flowing-light outline).  Matching is by contour
// point count equality — a fragile heuristic that can remove a wrong contour
// if two contours happen to have the same size.
// Returns -1 when the count dropped since the previous call, else the current
// count; 0 on empty input.  src_bin is static — not reentrant.
int Energy::findArmor(const cv::Mat &src, vector<EnergyPart> &armors, int &last_armors_cnt) {
if (src.empty())return 0;
static Mat src_bin;
src_bin = src.clone();
// threshold(src, src_bin, energy_part_param_.ARMOR_GRAY_THRESH, 255, THRESH_BINARY);
if(src.type() == CV_8UC3){
cvtColor(src_bin, src_bin, CV_BGR2GRAY);
}
std::vector<vector<Point> > armor_contours;
std::vector<vector<Point> > armor_contours_external;// subtract external contours from all contours, keeping only inner ones to remove the flowing-light bar
StructingElementClose(src_bin);
// imshow("armor struct",src_bin);
findContours(src_bin, armor_contours, CV_RETR_LIST, CV_CHAIN_APPROX_NONE);
findContours(src_bin, armor_contours_external, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_NONE);
for (int i = 0; i < armor_contours_external.size(); i++)// remove the external contours
{
unsigned long external_contour_size = armor_contours_external[i].size();
for (int j = 0; j < armor_contours.size(); j++)
{
unsigned long all_size = armor_contours[j].size();
if (external_contour_size == all_size)
{
// swap-and-pop removal (order of armor_contours is not preserved).
swap(armor_contours[j], armor_contours[armor_contours.size() - 1]);
armor_contours.pop_back();
break;
}
}
}
for (auto &armor_contour : armor_contours) {
if (!isValidArmorContour(armor_contour))
{
continue;
}
RotatedRect cur_rect = minAreaRect(armor_contour);
Size2f cur_size = cur_rect.size;
float length = cur_size.height > cur_size.width ? cur_size.height : cur_size.width;
float width = cur_size.height < cur_size.width ? cur_size.height : cur_size.width;
// if(length>10&&width>10){
// armors.emplace_back(armor_contour);
// cout<<"armor area: "<<length<<'\t'<<width<<endl;
// }
armors.emplace_back(armor_contour);
cout<<"armor area: "<<length<<'\t'<<width<<endl;
}
// A drop in count signals a state change on the mechanism.
if(armors.size() < last_armors_cnt){
last_armors_cnt = static_cast<int>(armors.size());
return -1;
}
last_armors_cnt = static_cast<int>(armors.size());
return static_cast<int>(armors.size());
}
int Energy::findGimbleZeroPoint(const cv::Mat &src, vector<EnergyPart> &gimble_zero_points) {
    // Detect the small bright dot used to calibrate the gimbal's zero point
    // in camera coordinates. Appends candidates to `gimble_zero_points` and
    // returns how many were found (0 for an empty frame).
    if (src.empty())return 0;
    static Mat src_bin;
    src_bin = src.clone();
    if(src.type() == CV_8UC3){
        cvtColor(src_bin, src_bin, CV_BGR2GRAY);
    }
    std::vector<vector<Point> > zero_point_contours;
    findContours(src_bin, zero_point_contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_NONE);
    for (auto &zero_point_contour : zero_point_contours) {
        RotatedRect cur_rect = minAreaRect(zero_point_contour);
        Size2f cur_size = cur_rect.size;
        float length = cur_size.height > cur_size.width ? cur_size.height : cur_size.width;
        float width = cur_size.height < cur_size.width ? cur_size.height : cur_size.width;
        // the calibration dot is tiny: keep contours between 1 and 10 px a side
        if(length<10&&width<10&&length>1&&width>1){
            cout<<"zero point center: "<<cur_rect.center<<endl;
            cout<<"zero point area: "<<length<<'\t'<<width<<endl;
            gimble_zero_points.emplace_back(zero_point_contour);
        }
    }
    // Fix: previously returned fans.size() -- the count from an unrelated
    // detector -- instead of the zero points actually found here.
    return static_cast<int>(gimble_zero_points.size());
}
bool Energy::isValidFanContour(const vector<cv::Point> &fan_contour) {
double cur_contour_area = contourArea(fan_contour);
if (cur_contour_area > energy_part_param_.FAN_CONTOUR_AREA_MAX ||
cur_contour_area < energy_part_param_.FAN_CONTOUR_AREA_MIN)
{
//cout<<cur_contour_area<<" "<<energy_fan_param_.CONTOUR_AREA_MIN<<" "<<energy_fan_param_.CONTOUR_AREA_MAX<<endl;
//cout<<"area fail."<<endl;
return false;
}
RotatedRect cur_rect = minAreaRect(fan_contour);
Size2f cur_size = cur_rect.size;
float length = cur_size.height > cur_size.width ? cur_size.height : cur_size.width;
float width = cur_size.height < cur_size.width ? cur_size.height : cur_size.width;
if (length < energy_part_param_.FAN_CONTOUR_LENGTH_MIN || width < energy_part_param_.FAN_CONTOUR_WIDTH_MIN)
{
//cout<<"length width min fail."<<endl;
return false;
}
// float length_width_ratio = length / width;
// if (length_width_ratio > energy_part_param_.FAN_CONTOUR_HW_RATIO_MAX ||
// length_width_ratio < energy_part_param_.FAN_CONTOUR_HW_RATIO_MIN)
// {
// //cout<<"length width ratio fail."<<endl;
// return false;
// }
if (cur_contour_area / cur_size.area() < 0.6) return false;
return true;
}
bool Energy::isValidArmorContour(const vector<cv::Point> &armor_contour) {
double cur_contour_area = contourArea(armor_contour);
// if (cur_contour_area > energy_part_param_.ARMOR_CONTOUR_AREA_MAX ||
// cur_contour_area < energy_part_param_.ARMOR_CONTOUR_AREA_MIN)
// {
// //cout<<cur_contour_area<<" "<<energy_fan_param_.CONTOUR_AREA_MIN<<" "<<energy_fan_param_.CONTOUR_AREA_MAX<<endl;
// //cout<<"area fail."<<endl;
// return false;
// }
RotatedRect cur_rect = minAreaRect(armor_contour);
Size2f cur_size = cur_rect.size;
float length = cur_size.height > cur_size.width ? cur_size.height : cur_size.width;
float width = cur_size.height < cur_size.width ? cur_size.height : cur_size.width;
if (length < energy_part_param_.ARMOR_CONTOUR_LENGTH_MIN || width < energy_part_param_.ARMOR_CONTOUR_WIDTH_MIN)
{
//cout<<"length width min fail."<<endl;
return false;
}
if (length > energy_part_param_.ARMOR_CONTOUR_LENGTH_MAX||width>energy_part_param_.ARMOR_CONTOUR_WIDTH_MAX)
{
//cout<<"length width max fail."<<endl;
return false;
}
float length_width_ratio = length / width;
if (length_width_ratio > energy_part_param_.ARMOR_CONTOUR_HW_RATIO_MAX ||
length_width_ratio < energy_part_param_.ARMOR_CONTOUR_HW_RATIO_MIN)
{
//cout<<"length width ratio fail."<<endl;
return false;
}
if (cur_contour_area / cur_size.area() < 0.7) return false;
return true;
}

View File

@@ -0,0 +1,61 @@
//
// Created by xixiliadorabarry on 1/24/19.
//
#include "energy/energy.h"
using namespace cv;
using std::cout;
using std::endl;
using std::vector;
void Energy::findTarget(std::vector<float>fanPosition, std::vector<float>armorPosition, float &target_armor) {
    // Choose the armor plate to hit: the one armor (by polar angle, degrees)
    // that has no lit fan blade paired with it. Also updates the
    // `target_center` member with that armor's pixel coordinates.
    // Expects one more armor than fans; other ratios are ignored.
    if (fanPosition.size() >= armorPosition.size()) return;
    if (armorPosition.size()==0)return;
    // No fans at all: the single armor is the target.
    if (fanPosition.size() == 0) {
        target_armor = armorPosition.at(0);
        for (const auto &armor : armors)
        {
            target_center = armor.rect.center;
//            cout<<"target center: "<<target_center<<endl;
        }
        return;
    }
    // Sort both angle lists so fans and armors can be paired in order.
    sort(fanPosition.begin(), fanPosition.end());
    /*for (vector<float>::iterator it = fanPosition.begin(); it != fanPosition.end(); it++) {
        cout << *it << endl;
    }*/
    sort(armorPosition.begin(), armorPosition.end());
    /*for (vector<float>::iterator it = armorPosition.begin(); it != armorPosition.end(); it++) {
        cout << *it << endl;
    }*/
    int i, j = 0;
    // Walk the sorted lists in lock-step: an armor within TWIN_ANGEL_MAX
    // degrees of the current fan is "paired"; the first unpaired armor
    // becomes the target (i == j until the first mismatch).
    for (i = 0; i < fanPosition.size(); ++i) {
        if (armorPosition.at(i) - fanPosition.at(j) < energy_part_param_.TWIN_ANGEL_MAX && armorPosition.at(i) - fanPosition.at(j) > -1 * energy_part_param_.TWIN_ANGEL_MAX) {
            j++;
            continue;
        }
        else {
            target_armor = armorPosition.at(j);
            // Recover the pixel centre whose angle matches the chosen armor.
            // NOTE(review): exact float == comparison works only because the
            // angle is recomputed with the identical expression used when the
            // position list was built.
            for (const auto &armor : armors)
            {
                float angle = static_cast<float>(180 / PI * atan2(-1 * (armor.rect.center.y - cycle_center.y), (armor.rect.center.x - cycle_center.x)));
                if(target_armor==angle){
                    target_center = armor.rect.center;
//                    cout<<"target center: "<<target_center<<endl;
                }
            }
            return;
        }
    }
    // Every fan was paired: the last (largest-angle) armor is the target.
    target_armor = armorPosition.at(armorPosition.size() - 1);
    for (const auto &armor : armors)
    {
        float angle = static_cast<float>(180 / PI * atan2(-1 * (armor.rect.center.y - cycle_center.y), (armor.rect.center.x - cycle_center.x)));
        if(target_armor == angle){
            target_center = armor.rect.center;
//            cout<<"target center: "<<target_center<<endl;
        }
    }
}

View File

@@ -0,0 +1,26 @@
//
// Created by xixiliadorabarry on 19-3-23.
//
#include "energy/energy.h"
#include <cmath>
using namespace cv;
using std::cout;
using std::endl;
using std::vector;
void Energy::gimbleRotation(){
//该方法用于标定激光零点的情况,对操作手友好,但建立在云台稳定情况下
// yaw_rotation = static_cast<float>(180 / PI * atan2(-1*STRETCH*(hit_point.x-ZERO_POINT_X), ATTACK_DISTANCE));
// pitch_rotation = static_cast<float>(180 / PI * atan2((ATTACK_DISTANCE*tan(PITCH_ORIGIN_RAD)-STRETCH*(hit_point.y-ZERO_POINT_Y)), ATTACK_DISTANCE));
//该方法用于操作手自己完成对心工作的情况,对操作手要求高
cv::Point2f real_hit_point;
stretch(hit_point, real_hit_point);
// yaw_rotation = static_cast<float>(180 / PI * atan2((ATTACK_DISTANCE*tan(YAW_ORIGIN_RAD)-real_hit_point.x), ATTACK_DISTANCE));
// pitch_rotation = static_cast<float>(180 / PI * atan2((ATTACK_DISTANCE*tan(PITCH_ORIGIN_RAD)-real_hit_point.y), ATTACK_DISTANCE));
yaw_rotation = static_cast<float>(180 / PI * atan2((ATTACK_DISTANCE*tan(aim.yaw)-real_hit_point.x), ATTACK_DISTANCE));
pitch_rotation = static_cast<float>(180 / PI * atan2((ATTACK_DISTANCE*tan(aim.pitch)-real_hit_point.y), ATTACK_DISTANCE));
}

View File

@@ -0,0 +1,31 @@
//
// Created by xixiliadorabarry on 1/24/19.
//
#include "energy/energy.h"
#include "energy/constant.h"
using namespace cv;
using std::cout;
using std::endl;
using std::vector;
void Energy::getHitPoint(){
    // Lead prediction: rotate the current target centre around the fitted
    // wheel centre by the angle the wheel will sweep during HIT_TIME
    // (degrees = direction * RPM * HIT_TIME * 360 / 60), storing the result
    // in `hit_point`.
    const double sweep_deg = static_cast<double>(energy_part_rotation * energy_part_param_.RPM
                                                 * energy_part_param_.HIT_TIME * 360 / 60);
    rotate(sweep_deg, radius, cycle_center, target_center, hit_point);
}
bool Energy::changeTarget(){
    // The target counts as unchanged when its centre barely moved, or its
    // angular position stayed close (including wrap-around past 330 deg).
    const double centre_shift = pointDistance(target_center, last_target_center);
    const double angle_shift = fabs(target_position - last_target_position);
    const bool same_target = (centre_shift < 25) || (angle_shift < 30) || (angle_shift > 330);
    return !same_target;
}

View File

@@ -0,0 +1,49 @@
//
// Created by xixiliadorabarry on 1/24/19.
//
#include "energy/energy.h"
using namespace cv;
using std::cout;
using std::endl;
using std::vector;
void Energy::getFanPosition(std::vector<float> &fanPosition, const std::vector<EnergyPart> &fans, cv::Point cycle_center, double radius) {
    // Convert every detected fan centre into a polar angle (degrees, y-axis
    // flipped so angles grow counter-clockwise on screen) around the wheel
    // centre. Does nothing until a circle has been fitted (radius != 0).
    if (radius == 0)return;
    for (size_t idx = 0; idx < fans.size(); ++idx)
    {
        const cv::Point2f &c = fans[idx].rect.center;
        fanPosition.push_back(static_cast<float>(180 / PI * atan2(-1 * (c.y - cycle_center.y), (c.x - cycle_center.x))));
    }
    cout << "fanPosition.size() = " << fanPosition.size() << '\t' << endl;
}
void Energy::getArmorPosition(std::vector<float> &armorPosition, const std::vector<EnergyPart> &armors, cv::Point cycle_center, double radius) {
    // Convert every detected armor centre into a polar angle (degrees, y-axis
    // flipped so angles grow counter-clockwise on screen) around the wheel
    // centre. Does nothing until a circle has been fitted (radius != 0).
    if (radius == 0)return;
    for (size_t idx = 0; idx < armors.size(); ++idx)
    {
        const cv::Point2f &c = armors[idx].rect.center;
        armorPosition.push_back(static_cast<float>(180 / PI * atan2(-1 * (c.y - cycle_center.y), (c.x - cycle_center.x))));
    }
    cout << "armorPosition.size() = " << armorPosition.size() << '\t' << endl;
}
void Energy::getFirstArmorCenters(vector<EnergyPart> &armors, std::vector<cv::Point> &first_armor_centers)
{
    // Record armor centres only while at most one armor is visible (i.e. the
    // first armor of the round). The size check is loop-invariant, so it is
    // hoisted out of the loop instead of being re-evaluated per element.
    if (armors.size() >= 2) return;
    for (const auto &armor : armors) {
        first_armor_centers.push_back(armor.rect.center);
    }
}
void Energy::getAllArmorCenters()
{
for (const auto &armor : armors) {
all_armor_centers.push_back(armor.rect.center);
}
}
void Energy::getPosition(cv::Point point, double &angle){
    // Polar angle (degrees, y flipped to screen-up) of `point` around the
    // fitted circle centre; `angle` is left untouched when no circle has been
    // fitted yet (radius == 0).
    if (radius == 0)return;
    const double dy = -1 * (point.y - cycle_center.y);
    const double dx = (point.x - cycle_center.x);
    angle = 180 / PI * atan2(dy, dx);
}

View File

@@ -0,0 +1,42 @@
//
// Created by xixiliadorabarry on 1/24/19.
//
#include "energy/energy.h"
using namespace cv;
using std::cout;
using std::endl;
using std::vector;
void Energy::initEnergyPartParam() {
    // Tuned detection/prediction constants for the power-rune ("energy")
    // module. Thresholds are 8-bit gray levels; lengths/areas are pixels.
    energy_part_param_.RPM = 10;          // wheel speed, rev/min (used with HIT_TIME for lead prediction)
    energy_part_param_.HIT_TIME = 1.0;    // lead time in seconds used when predicting the hit point
    energy_part_param_.GRAY_THRESH = 240;        // binarization threshold for the whole frame
    energy_part_param_.SPLIT_GRAY_THRESH = 80;   // threshold for the colour-split image
    energy_part_param_.FAN_GRAY_THRESH = 75;     // threshold used when isolating fan blades
    energy_part_param_.ARMOR_GRAY_THRESH = 80;   // threshold used when isolating armor plates
    // Fan-blade contour acceptance window.
    energy_part_param_.FAN_CONTOUR_AREA_MAX = 17000;
    energy_part_param_.FAN_CONTOUR_AREA_MIN = 7000;
    energy_part_param_.FAN_CONTOUR_LENGTH_MIN = 150;
    energy_part_param_.FAN_CONTOUR_WIDTH_MIN = 50;
    energy_part_param_.FAN_CONTOUR_HW_RATIO_MAX = 4;
    energy_part_param_.FAN_CONTOUR_HW_RATIO_MIN = 1;
    // Armor-plate contour acceptance window.
    energy_part_param_.ARMOR_CONTOUR_AREA_MAX = 100000;
    energy_part_param_.ARMOR_CONTOUR_AREA_MIN = 0;
    energy_part_param_.ARMOR_CONTOUR_LENGTH_MIN = 50;
    energy_part_param_.ARMOR_CONTOUR_WIDTH_MIN = 25;
    energy_part_param_.ARMOR_CONTOUR_LENGTH_MAX = 80;
    energy_part_param_.ARMOR_CONTOUR_WIDTH_MAX = 50;
    energy_part_param_.ARMOR_CONTOUR_HW_RATIO_MAX = 3;
    energy_part_param_.ARMOR_CONTOUR_HW_RATIO_MIN = 1;
    energy_part_param_.TWIN_ANGEL_MAX = 10;      // max angular gap (deg) when pairing an armor with a fan
}

76
energy/src/energy/run.cpp Normal file
View File

@@ -0,0 +1,76 @@
//
// Created by xixiliadorabarry on 3/5/19.
//
#include "energy/energy.h"
using namespace cv;
using std::cout;
using std::endl;
using std::vector;
int Energy::run(cv::Mat &src){
    // One full detection-and-aim cycle on a single frame:
    // binarize -> find fans/armors -> fit the wheel circle -> pick the
    // unpaired armor -> predict the hit point -> send yaw/pitch over UART.
    // Returns 0 when the frame is rejected, 1 after a command was sent.
    fans.clear();
    armors.clear();
    fanPosition.clear();
    armorPosition.clear();
    gimble_zero_points.clear();
    threshold(src, src, energy_part_param_.GRAY_THRESH, 255, THRESH_BINARY);
    fans_cnt = findFan(src, fans, last_fans_cnt);
    if(fans_cnt==-1) return 0;// drop frames where a fan was likely missed
    armors_cnt = findArmor(src, armors, last_armors_cnt);
    if(armors_cnt==-1) return 0;// drop frames where an armor was likely missed
    if(armors_cnt>0||fans_cnt>0) showBothContours("Both",src, fans, armors);
    // Exactly one armor (the target) should be unpaired with a fan.
    if(armors_cnt != fans_cnt+1) return 0;
    getAllArmorCenters();
    cout<<"all_armor_centers.size(): "<<all_armor_centers.size()<<endl;
    cycleLeastFit();
    getFanPosition(fanPosition, fans, cycle_center, radius);
    getArmorPosition(armorPosition, armors, cycle_center, radius);
    findTarget(fanPosition, armorPosition, target_armor);
    cout << "The target armor's position is " << target_armor << endl;
    cout<<"The target armor center is: "<<target_center<<endl;
    getHitPoint();
    cout << "The hit point position is " << hit_point << endl;
    gimbleRotation();
    sendTargetByUart(yaw_rotation, pitch_rotation, attack_distance);
    cout<<"yaw: "<<yaw_rotation<<'\t'<<"pitch: "<<pitch_rotation<<endl;
    // Fix: the function is declared int but previously fell off the end
    // without returning a value (undefined behaviour).
    return 1;
}

View File

@@ -0,0 +1,15 @@
//
// Created by xixiliadorabarry on 1/24/19.
//
#include "energy/energy.h"
void Energy::sendTargetByUart(float x, float y, float z) {
    // Forward the aiming solution (yaw, pitch, distance) to the MCU over the
    // serial link and keep a running count of packets sent.
    uart.sendTarget(x, y, z);
    ++send_cnt;
}

View File

@@ -0,0 +1,96 @@
//
// Created by xixiliadorabarry on 1/24/19.
//
#include "energy/energy.h"
using namespace cv;
using std::cout;
using std::endl;
using std::vector;
void Energy::showFanContours(std::string windows_name, const cv::Mat &src, const std::vector<EnergyPart> &fans) {
    // Draw each fan's minimum-area rectangle in blue (thickness 2) on a copy
    // of `src` and display it in the named window.
    if (src.empty())return;
    static Mat canvas;
    if(src.type() == CV_8UC1) // grayscale input: expand to three channels
    {
        cvtColor(src, canvas, COLOR_GRAY2RGB);
    } else if (src.type() == CV_8UC3) // already colour
    {
        canvas = src.clone();
    }
    for (const auto &fan : fans)
    {
        Point2f corners[4]; // the rotated rectangle's four vertices
        fan.rect.points(corners);
        for (int k = 0; k < 4; k++) {
            line(canvas, corners[k], corners[(k + 1) % 4], Scalar(255, 0, 0), 2);
        }
    }
    imshow(windows_name, canvas);
}
void Energy::showArmorContours(std::string windows_name, const cv::Mat &src, const std::vector<EnergyPart> &armors) {
    // Draw each armor's minimum-area rectangle in red (thickness 2) on a copy
    // of `src` and display it in the named window.
    if (src.empty())return;
    static Mat canvas;
    if(src.type() == CV_8UC1) // grayscale input: expand to three channels
    {
        cvtColor(src, canvas, COLOR_GRAY2RGB);
    } else if (src.type() == CV_8UC3) // already colour
    {
        canvas = src.clone();
    }
    for (const auto &armor : armors)
    {
        Point2f corners[4]; // the rotated rectangle's four vertices
        armor.rect.points(corners);
        for (int k = 0; k < 4; k++) {
            line(canvas, corners[k], corners[(k + 1) % 4], Scalar(0, 0, 255), 2);
        }
    }
    imshow(windows_name, canvas);
}
void Energy::showBothContours(std::string windows_name, const cv::Mat &src, const std::vector<EnergyPart> &fans,
                              const std::vector<EnergyPart> &armors) {
    // Overlay fans (blue) and armors (red) as thick (4 px) minimum-area
    // rectangles on a copy of `src` and display the result.
    if (src.empty())return;
    static Mat canvas;
    if(src.type() == CV_8UC1) // grayscale input: expand to three channels
    {
        cvtColor(src, canvas, COLOR_GRAY2RGB);
    } else if (src.type() == CV_8UC3) // already colour
    {
        canvas = src.clone();
    }
    for (const auto &fan : fans)
    {
        Point2f corners[4]; // the rotated rectangle's four vertices
        fan.rect.points(corners);
        for (int k = 0; k < 4; k++) {
            line(canvas, corners[k], corners[(k + 1) % 4], Scalar(255, 0, 0), 4);
        }
    }
    for (const auto &armor : armors)
    {
        Point2f corners[4]; // the rotated rectangle's four vertices
        armor.rect.points(corners);
        for (int k = 0; k < 4; k++) {
            line(canvas, corners[k], corners[(k + 1) % 4], Scalar(0, 0, 255), 4);
        }
    }
    imshow(windows_name, canvas);
}

View File

@@ -0,0 +1,88 @@
//
// Created by xixiliadorabarry on 1/24/19.
//
#include "energy/energy.h"
#include <iostream>
#include <algorithm>
#include <cmath>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
using namespace cv;
using std::cout;
using std::endl;
using std::vector;
void Energy::extract(cv::Mat &src){
    // Crop the configured region of interest out of `src` (in place) and
    // scale it to the fixed 640x480 working resolution.
    cv::Rect roi(EXTRACT_POINT_X, EXTRACT_POINT_Y, EXTRACT_WIDTH, EXTRACT_HEIGHT);
    src = src(roi).clone();
    // Fix: the original call also passed fx=2, which cv::resize ignores when
    // an explicit non-zero dsize is given; dropped to avoid confusion.
    cv::resize(src, src, cv::Size(640, 480));
    imshow("extract", src);
}
void Energy::saveFourPoints(std::vector<cv::Point> &FourPoints, cv::Point point_1, cv::Point point_2, cv::Point point_3, cv::Point point_4) {
    // Append the four points, in the order given, to the output list.
    const cv::Point corners[] = {point_1, point_2, point_3, point_4};
    for (const auto &corner : corners) {
        FourPoints.push_back(corner);
    }
}
void Energy::savePoint2f(std::vector<cv::Point2f> &point_save, cv::Point point) {
    // Store an integer pixel coordinate as its floating-point equivalent.
    const cv::Point2f as_float = static_cast<cv::Point2f>(point);
    point_save.push_back(as_float);
}
double Energy::pointDistance(cv::Point point_1, cv::Point point_2){
    // Euclidean distance between two pixels. std::hypot is the idiomatic,
    // overflow/underflow-safe replacement for sqrt(pow(dx,2) + pow(dy,2)).
    return std::hypot(static_cast<double>(point_1.x - point_2.x),
                      static_cast<double>(point_1.y - point_2.y));
}
void Energy::rotate(double rad, double radius, cv::Point center, cv::Point point_old, cv::Point &point_new) {
    // Rotate `point_old` around `center` by `rad` (despite the name, `rad` is
    // in degrees -- it is multiplied by d2r below) and store the result in
    // `point_new`. The negated angle sets the rotation direction to match the
    // wheel's observed spin.
    // NOTE(review): the `radius` parameter is unused here.
    int x1, x2, y1, y2;
    // scale by 100 to reduce the truncation error of the integer casts
    x1 = center.x * 100;
    x2 = point_old.x * 100;
    y1 = center.y * 100;
    y2 = point_old.y * 100;
    point_new.x = static_cast<int>((x1 + (x2 - x1)*cos(-rad * d2r) - (y1 - y2)*sin(-rad * d2r))/100);
    point_new.y = static_cast<int>((y1 - (x2 - x1)*sin(-rad * d2r) - (y1 - y2)*cos(-rad * d2r))/100);
}
void Energy::stretch(cv::Point point_1, cv::Point2f &point_2){
    // Project `point_1` radially onto a circle of radius
    // ARMOR_CENTER_TO_CYCLE_CENTER around the fitted wheel centre.
    // NOTE(review): the output is an offset *relative to the circle centre*
    // (the centre is not added back) -- confirm callers expect that.
    if(point_1==cycle_center){
        // zero-length direction vector: the projection is undefined
        cout<<"stretch wrong!"<<endl;
        return;
    }
    double x_0 = point_1.x - cycle_center.x;
    double y_0 = point_1.y - cycle_center.y;
    double r_0 = sqrt(pow(x_0, 2)+ pow(y_0, 2));
    point_2.x = static_cast<float >( ARMOR_CENTER_TO_CYCLE_CENTER * x_0 / r_0);
    point_2.y = static_cast<float >( ARMOR_CENTER_TO_CYCLE_CENTER * y_0 / r_0);
}
void Energy::cycle(cv::Point p1, cv::Point p2, cv::Point p3, cv::Point &center, double &radius){
    // Fit the circle passing through three points by solving the 2x2 linear
    // system derived from:
    //   (2x2-2x1)X + (2y2-2y1)Y = x2^2-x1^2+y2^2-y1^2
    //   (2x3-2x2)X + (2y3-2y2)Y = x3^2-x2^2+y3^2-y2^2
    double x1, y1, x2, y2, x3, y3;
    double a, b, c, g, e, f;
    x1 = p1.x;
    y1 = p1.y;
    x2 = p2.x;
    y2 = p2.y;
    x3 = p3.x;
    y3 = p3.y;
    e = 2 * (x2 - x1);
    f = 2 * (y2 - y1);
    g = x2 * x2 - x1 * x1 + y2 * y2 - y1 * y1;
    a = 2 * (x3 - x2);
    b = 2 * (y3 - y2);
    c = x3 * x3 - x2 * x2 + y3 * y3 - y2 * y2;
    const double det = e * b - a * f;
    if (det == 0) return; // collinear points: no unique circle, leave outputs unchanged
    // Fix: the original wrote the result only into the `cycle_center` member
    // and ignored the `center` out-parameter entirely. Fill the out-parameter
    // and keep the member in sync for existing callers.
    center.x = static_cast<int>((g*b - c * f) / det);
    center.y = static_cast<int>((a*g - c * e) / (a*f - b * e));
    cycle_center = center;
    radius = sqrt((center.x - x1)*(center.x - x1) + (center.y - y1)*(center.y - y1));
}

2823
include/camera/camera_api.h Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,663 @@
#pragma once
#ifndef _CAMERA_DEFINE_H_
#define _CAMERA_DEFINE_H_
#include "camera_status.h"
#define MAX_CROSS_LINE 9
//相机的句柄类型定义
typedef int CameraHandle;
typedef int INT;
typedef long LONG;
typedef unsigned int UINT;
typedef unsigned long long UINT64;
typedef int BOOL;
typedef unsigned char BYTE;
typedef unsigned int DWORD;
typedef void* PVOID;
typedef void* HWND;
typedef char* LPCTSTR;
typedef unsigned short USHORT;
typedef short SHORT;
typedef unsigned char* LPBYTE;
typedef char CHAR;
typedef short WORD;
typedef INT HANDLE;
typedef void VOID;
typedef unsigned long ULONG;
typedef void** LPVOID;
typedef unsigned char UCHAR;
typedef void* HMODULE;
#define TRUE 1
#define FALSE 0
//图像查表变换的方式
typedef enum
{
LUTMODE_PARAM_GEN=0,//通过调节参数动态生成LUT表
LUTMODE_PRESET, //使用预设的LUT表
LUTMODE_USER_DEF //使用用户自定义的LUT表
}emSdkLutMode;
//相机的视频流控制
typedef enum
{
RUNMODE_PLAY=0, //正常预览,捕获到图像就显示。(如果相机处于触发模式,则会等待触发帧的到来)
RUNMODE_PAUSE, //暂停,会暂停相机的图像输出,同时也不会去捕获图像
RUNMODE_STOP //停止相机工作。反初始化后,相机就处于停止模式
}emSdkRunMode;
//SDK内部显示接口的显示方式
typedef enum
{
DISPLAYMODE_SCALE=0, //缩放显示模式,缩放到显示控件的尺寸
DISPLAYMODE_REAL //1:1显示模式当图像尺寸大于显示控件的尺寸时只显示局部
}emSdkDisplayMode;
//录像状态
typedef enum
{
RECORD_STOP = 0, //停止
RECORD_START, //录像中
RECORD_PAUSE //暂停
}emSdkRecordMode;
//图像的镜像操作
typedef enum
{
MIRROR_DIRECTION_HORIZONTAL = 0,//水平镜像
MIRROR_DIRECTION_VERTICAL //垂直镜像
}emSdkMirrorDirection;
//相机视频的帧率
typedef enum
{
FRAME_SPEED_LOW = 0, //低速模式
FRAME_SPEED_NORMAL, //普通模式
FRAME_SPEED_HIGH, //高速模式(需要较高的传输带宽,多设备共享传输带宽时会对帧率的稳定性有影响)
FRAME_SPEED_SUPER //超高速模式(需要较高的传输带宽,多设备共享传输带宽时会对帧率的稳定性有影响)
}emSdkFrameSpeed;
//保存文件的格式类型
typedef enum
{
FILE_JPG = 1,//JPG
FILE_BMP = 2,//BMP
FILE_RAW = 4,//相机输出的bayer格式文件,对于不支持bayer格式输出相机无法保存为该格式
FILE_PNG = 8 //PNG
}emSdkFileType;
//相机中的图像传感器的工作模式
typedef enum
{
CONTINUATION = 0,//连续采集模式
SOFT_TRIGGER, //软件触发模式,由软件发送指令后,传感器开始采集指定帧数的图像,采集完成后,停止输出
EXTERNAL_TRIGGER //硬件触发模式,当接收到外部信号,传感器开始采集指定帧数的图像,采集完成后,停止输出
} emSdkSnapMode;
//自动曝光时抗频闪的频闪
typedef enum
{
LIGHT_FREQUENCY_50HZ = 0,//50HZ,一般的灯光都是50HZ
LIGHT_FREQUENCY_60HZ //60HZ,主要是指显示器的
}emSdkLightFrequency;
//相机的配置参数分为A,B,C,D 4组进行保存。
typedef enum
{
PARAMETER_TEAM_DEFAULT = 0xff,
PARAMETER_TEAM_A = 0,
PARAMETER_TEAM_B = 1,
PARAMETER_TEAM_C = 2,
PARAMETER_TEAM_D = 3
}emSdkParameterTeam;
/*emSdkParameterMode 相机参数加载模式,参数加载分为从文件和从设备加载两种方式
PARAM_MODE_BY_MODEL:所有同型号的相机共用ABCD四组参数文件。修改
一台相机的参数文件,会影响到整个同型号的
相机参数加载。
PARAM_MODE_BY_NAME:所有设备名相同的相机共用ABCD四组参数文件。
默认情况下,当电脑上只接了某型号一台相机时,
设备名都是一样的,而您希望某一台相机能够加载
不同的参数文件,则可以通过修改其设备名的方式
来让其加载指定的参数文件。
PARAM_MODE_BY_SN:相机按照自己的唯一序列号来加载ABCD四组参数文件
序列号在出厂时已经固化在相机内,每台相机的序列号
都不相同,通过这种方式,每台相机的参数文件都是独立的。
您可以根据自己的使用环境,灵活使用以上几种方式加载参数。例如,以
MV-U300为例您希望多台该型号的相机在您的 电脑上都共用4组参数那么就
使用PARAM_MODE_BY_MODEL方式;如果您希望其中某一台或者某几台MV-U300能
使用自己参数文件而其余的MV-U300又要使用相同的参数文件那么使用
PARAM_MODE_BY_NAME方式;如果您希望每台MV-U300都使用不同的参数文件那么
使用PARAM_MODE_BY_SN方式。
参数文件存在安装目录的 \Camera\Configs 目录下以config为后缀名的文件。
*/
typedef enum
{
PARAM_MODE_BY_MODEL = 0, //根据相机型号名从文件中加载参数例如MV-U300
PARAM_MODE_BY_NAME, //根据设备昵称(tSdkCameraDevInfo.acFriendlyName)从文件中加载参数例如MV-U300,该昵称可自定义
PARAM_MODE_BY_SN, //根据设备的唯一序列号从文件中加载参数,序列号在出厂时已经写入设备,每台相机拥有不同的序列号。
PARAM_MODE_IN_DEVICE //从设备的固态存储器中加载参数。不是所有的型号都支持从相机中读写参数组由tSdkCameraCapbility.bParamInDevice决定
}emSdkParameterMode;
//SDK生成的相机配置页面掩码值
typedef enum
{
PROP_SHEET_INDEX_EXPOSURE = 0,
PROP_SHEET_INDEX_ISP_COLOR,
PROP_SHEET_INDEX_ISP_LUT,
PROP_SHEET_INDEX_ISP_SHAPE,
PROP_SHEET_INDEX_VIDEO_FORMAT,
PROP_SHEET_INDEX_RESOLUTION,
PROP_SHEET_INDEX_IO_CTRL,
PROP_SHEET_INDEX_TRIGGER_SET,
PROP_SHEET_INDEX_OVERLAY,
PROP_SHEET_INDEX_DEVICE_INFO
}emSdkPropSheetMask;
//SDK生成的相机配置页面的回调消息类型
typedef enum
{
SHEET_MSG_LOAD_PARAM_DEFAULT = 0, //参数被恢复成默认后,触发该消息
SHEET_MSG_LOAD_PARAM_GROUP, //加载指定参数组,触发该消息
SHEET_MSG_LOAD_PARAM_FROMFILE, //从指定文件加载参数后,触发该消息
SHEET_MSG_SAVE_PARAM_GROUP //当前参数组被保存时,触发该消息
}emSdkPropSheetMsg;
//可视化选择参考窗口的类型
typedef enum
{
REF_WIN_AUTO_EXPOSURE = 0,
REF_WIN_WHITE_BALANCE,
}emSdkRefWinType;
//可视化选择参考窗口的类型
typedef enum
{
RES_MODE_PREVIEW = 0,
RES_MODE_SNAPSHOT,
}emSdkResolutionMode;
//白平衡时色温模式
typedef enum
{
CT_MODE_AUTO = 0, //自动识别色温
CT_MODE_PRESET, //使用指定的预设色温
CT_MODE_USER_DEF //自定义色温(增益和矩阵)
}emSdkClrTmpMode;
//LUT的颜色通道
typedef enum
{
LUT_CHANNEL_ALL = 0,//R,B,G三通道同时调节
LUT_CHANNEL_RED, //红色通道
LUT_CHANNEL_GREEN, //绿色通道
LUT_CHANNEL_BLUE, //蓝色通道
}emSdkLutChannel;
//ISP处理单元
typedef enum
{
ISP_PROCESSSOR_PC = 0,//使用PC的软件ISP模块
ISP_PROCESSSOR_DEVICE //使用相机自带的硬件ISP模块
}emSdkIspProcessor;
//闪光灯信号控制方式
typedef enum
{
STROBE_SYNC_WITH_TRIG_AUTO = 0, //和触发信号同步触发后相机进行曝光时自动生成STROBE信号。此时有效极性可设置(CameraSetStrobePolarity)。
STROBE_SYNC_WITH_TRIG_MANUAL, //和触发信号同步触发后STROBE延时指定的时间后(CameraSetStrobeDelayTime),再持续指定时间的脉冲(CameraSetStrobePulseWidth),有效极性可设置(CameraSetStrobePolarity)。
STROBE_ALWAYS_HIGH, //始终为高忽略STROBE信号的其他设置
STROBE_ALWAYS_LOW //始终为低忽略STROBE信号的其他设置
}emStrobeControl;
//硬件外触发的信号种类
typedef enum
{
EXT_TRIG_LEADING_EDGE = 0, //上升沿触发,默认为该方式
EXT_TRIG_TRAILING_EDGE, //下降沿触发
EXT_TRIG_HIGH_LEVEL, //高电平触发,电平宽度决定曝光时间,仅部分型号的相机支持电平触发方式。
EXT_TRIG_LOW_LEVEL //低电平触发,
}emExtTrigSignal;
//硬件外触发时的快门方式
typedef enum
{
EXT_TRIG_EXP_STANDARD = 0, //标准方式,默认为该方式。
EXT_TRIG_EXP_GRR, //全局复位方式部分滚动快门的CMOS型号的相机支持该方式配合外部机械快门可以达到全局快门的效果适合拍高速运动的物体
}emExtTrigShutterMode;
//相机的设备信息
typedef struct
{
char acProductSeries[32]; // 产品系列
char acProductName[32]; // 产品名称
char acFriendlyName[32]; // 产品昵称,用户可自定义改昵称,保存在相机内,用于区分多个相机同时使用,可以用CameraSetFriendlyName接口改变该昵称设备重启后生效。
char acLinkName[32]; // 内核符号连接名,内部使用
char acDriverVersion[32]; // 驱动版本
char acSensorType[32]; // sensor类型
char acPortType[32]; // 接口类型
char acSn[32]; // 产品唯一序列号
UINT uInstance; // 该型号相机在该电脑上的实例索引号,用于区分同型号多相机
} tSdkCameraDevInfo;
//tSdkResolutionRange结构体中SKIP、 BIN、RESAMPLE模式的掩码值
#define MASK_2X2_HD (1<<0) //硬件SKIP、BIN、重采样 2X2
#define MASK_3X3_HD (1<<1)
#define MASK_4X4_HD (1<<2)
#define MASK_5X5_HD (1<<3)
#define MASK_6X6_HD (1<<4)
#define MASK_7X7_HD (1<<5)
#define MASK_8X8_HD (1<<6)
#define MASK_9X9_HD (1<<7)
#define MASK_10X10_HD (1<<8)
#define MASK_11X11_HD (1<<9)
#define MASK_12X12_HD (1<<10)
#define MASK_13X13_HD (1<<11)
#define MASK_14X14_HD (1<<12)
#define MASK_15X15_HD (1<<13)
#define MASK_16X16_HD (1<<14)
#define MASK_17X17_HD (1<<15)
#define MASK_2X2_SW (1<<16) //硬件SKIP、BIN、重采样 2X2
#define MASK_3X3_SW (1<<17)
#define MASK_4X4_SW (1<<18)
#define MASK_5X5_SW (1<<19)
#define MASK_6X6_SW (1<<20)
#define MASK_7X7_SW (1<<21)
#define MASK_8X8_SW (1<<22)
#define MASK_9X9_SW (1<<23)
#define MASK_10X10_SW (1<<24)
#define MASK_11X11_SW (1<<25)
#define MASK_12X12_SW (1<<26)
#define MASK_13X13_SW (1<<27)
#define MASK_14X14_SW (1<<28)
#define MASK_15X15_SW (1<<29)
#define MASK_16X16_SW (1<<30)
#define MASK_17X17_SW (1<<31)
//相机的分辨率设定范围用于构件UI
typedef struct
{
INT iHeightMax; //图像最大高度
INT iHeightMin; //图像最小高度
INT iWidthMax; //图像最大宽度
INT iWidthMin; //图像最小宽度
UINT uSkipModeMask; //SKIP模式掩码为0表示不支持SKIP 。bit0为1,表示支持SKIP 2x2 ;bit1为1表示支持SKIP 3x3....
UINT uBinSumModeMask; //BIN(求和)模式掩码为0表示不支持BIN 。bit0为1,表示支持BIN 2x2 ;bit1为1表示支持BIN 3x3....
UINT uBinAverageModeMask; //BIN(求均值)模式掩码为0表示不支持BIN 。bit0为1,表示支持BIN 2x2 ;bit1为1表示支持BIN 3x3....
UINT uResampleMask; //硬件重采样的掩码
} tSdkResolutionRange;
//相机的分辨率描述
typedef struct
{
INT iIndex; // 索引号,[0,N]表示预设的分辨率(N 为预设分辨率的最大个数一般不超过20),OXFF 表示自定义分辨率(ROI)
char acDescription[32]; // 该分辨率的描述信息。仅预设分辨率时该信息有效。自定义分辨率可忽略该信息
UINT uBinSumMode; // BIN(求和)的模式,范围不能超过tSdkResolutionRange中uBinSumModeMask
UINT uBinAverageMode; // BIN(求均值)的模式,范围不能超过tSdkResolutionRange中uBinAverageModeMask
UINT uSkipMode; // 是否SKIP的尺寸为0表示禁止SKIP模式范围不能超过tSdkResolutionRange中uSkipModeMask
UINT uResampleMask; // 硬件重采样的掩码
INT iHOffsetFOV; // 采集视场相对于Sensor最大视场左上角的垂直偏移
INT iVOffsetFOV; // 采集视场相对于Sensor最大视场左上角的水平偏移
INT iWidthFOV; // 采集视场的宽度
INT iHeightFOV; // 采集视场的高度
INT iWidth; // 相机最终输出的图像的宽度
INT iHeight; // 相机最终输出的图像的高度
INT iWidthZoomHd; // 硬件缩放的宽度,不需要进行此操作的分辨率此变量设置为0.
INT iHeightZoomHd; // 硬件缩放的高度,不需要进行此操作的分辨率此变量设置为0.
INT iWidthZoomSw; // 软件缩放的宽度,不需要进行此操作的分辨率此变量设置为0.
INT iHeightZoomSw; // 软件缩放的高度,不需要进行此操作的分辨率此变量设置为0.
} tSdkImageResolution;
//相机白平衡色温模式描述信息
typedef struct
{
INT iIndex; // 模式索引号
char acDescription[32]; // 描述信息
} tSdkColorTemperatureDes;
//相机帧率描述信息
typedef struct
{
INT iIndex; // 帧率索引号一般0对应于低速模式1对应于普通模式2对应于高速模式
char acDescription[32]; // 描述信息
} tSdkFrameSpeed;
//相机曝光功能范围定义
typedef struct
{
UINT uiTargetMin; //自动曝光亮度目标最小值
UINT uiTargetMax; //自动曝光亮度目标最大值
UINT uiAnalogGainMin; //模拟增益的最小值单位为fAnalogGainStep中定义
UINT uiAnalogGainMax; //模拟增益的最大值单位为fAnalogGainStep中定义
float fAnalogGainStep; //模拟增益每增加1对应的增加的放大倍数。例如uiAnalogGainMin一般为16fAnalogGainStep一般为0.125那么最小放大倍数就是16*0.125 = 2倍
UINT uiExposeTimeMin; //手动模式下,曝光时间的最小值,单位:行。根据CameraGetExposureLineTime可以获得一行对应的时间(微秒),从而得到整帧的曝光时间
UINT uiExposeTimeMax; //手动模式下,曝光时间的最大值,单位:行
} tSdkExpose;
//触发模式描述
typedef struct
{
INT iIndex; //模式索引号
char acDescription[32]; //该模式的描述信息
} tSdkTrigger;
//传输分包大小描述(主要是针对网络相机有效)
typedef struct
{
INT iIndex; //分包大小索引号
char acDescription[32]; //对应的描述信息
UINT iPackSize;
} tSdkPackLength;
//预设的LUT表描述
typedef struct
{
INT iIndex; //编号
char acDescription[32]; //描述信息
} tSdkPresetLut;
//AE算法描述
typedef struct
{
INT iIndex; //编号
char acDescription[32]; //描述信息
} tSdkAeAlgorithm;
//RAW转RGB算法描述
typedef struct
{
INT iIndex; //编号
char acDescription[32]; //描述信息
} tSdkBayerDecodeAlgorithm;
//帧率统计信息
typedef struct
{
INT iTotal; //当前采集的总帧数(包括错误帧)
INT iCapture; //当前采集的有效帧的数量
INT iLost; //当前丢帧的数量
} tSdkFrameStatistic;
//相机输出的图像数据格式
typedef struct
{
INT iIndex; //格式种类编号
char acDescription[32]; //描述信息
UINT iMediaType; //对应的图像格式编码如CAMERA_MEDIA_TYPE_BAYGR8在本文件中有定义。
} tSdkMediaType;
//伽马的设定范围
typedef struct
{
INT iMin; //最小值
INT iMax; //最大值
} tGammaRange;
//对比度的设定范围
typedef struct
{
INT iMin; //最小值
INT iMax; //最大值
} tContrastRange;
//RGB三通道数字增益的设定范围
typedef struct
{
INT iRGainMin; //红色增益的最小值
INT iRGainMax; //红色增益的最大值
INT iGGainMin; //绿色增益的最小值
INT iGGainMax; //绿色增益的最大值
INT iBGainMin; //蓝色增益的最小值
INT iBGainMax; //蓝色增益的最大值
} tRgbGainRange;
//饱和度设定的范围
typedef struct
{
INT iMin; //最小值
INT iMax; //最大值
} tSaturationRange;
//锐化的设定范围
typedef struct
{
INT iMin; //最小值
INT iMax; //最大值
} tSharpnessRange;
//ISP模块的使能信息
typedef struct
{
BOOL bMonoSensor; //表示该型号相机是否为黑白相机,如果是黑白相机,则颜色相关的功能都无法调节
BOOL bWbOnce; //表示该型号相机是否支持手动白平衡功能
BOOL bAutoWb; //表示该型号相机是否支持自动白平衡功能
BOOL bAutoExposure; //表示该型号相机是否支持自动曝光功能
BOOL bManualExposure; //表示该型号相机是否支持手动曝光功能
BOOL bAntiFlick; //表示该型号相机是否支持抗频闪功能
BOOL bDeviceIsp; //表示该型号相机是否支持硬件ISP功能
BOOL bForceUseDeviceIsp;//bDeviceIsp和bForceUseDeviceIsp同时为TRUE时表示强制只用硬件ISP不可取消。
BOOL bZoomHD; //相机硬件是否支持图像缩放输出(只能是缩小)。
} tSdkIspCapacity;
/* 定义整合的设备描述信息这些信息可以用于动态构建UI */
typedef struct
{
tSdkTrigger *pTriggerDesc; // 触发模式
INT iTriggerDesc; // 触发模式的个数即pTriggerDesc数组的大小
tSdkImageResolution *pImageSizeDesc;// 预设分辨率选择
INT iImageSizeDesc; // 预设分辨率的个数即pImageSizeDesc数组的大小
tSdkColorTemperatureDes *pClrTempDesc;// 预设色温模式,用于白平衡
INT iClrTempDesc;
tSdkMediaType *pMediaTypeDesc; // 相机输出图像格式
INT iMediaTypdeDesc; // 相机输出图像格式的种类个数即pMediaTypeDesc数组的大小。
tSdkFrameSpeed *pFrameSpeedDesc; // 可调节帧速类型,对应界面上普通 高速 和超级三种速度设置
INT iFrameSpeedDesc; // 可调节帧速类型的个数即pFrameSpeedDesc数组的大小。
tSdkPackLength *pPackLenDesc; // 传输包长度,一般用于网络设备
INT iPackLenDesc; // 可供选择的传输分包长度的个数即pPackLenDesc数组的大小。
INT iOutputIoCounts; // 可编程输出IO的个数
INT iInputIoCounts; // 可编程输入IO的个数
tSdkPresetLut *pPresetLutDesc; // 相机预设的LUT表
INT iPresetLut; // 相机预设的LUT表的个数即pPresetLutDesc数组的大小
INT iUserDataMaxLen; // 指示该相机中用于保存用户数据区的最大长度。为0表示无。
BOOL bParamInDevice; // 指示该设备是否支持从设备中读写参数组。1为支持0不支持。
tSdkAeAlgorithm *pAeAlmSwDesc; // 软件自动曝光算法描述
int iAeAlmSwDesc; // 软件自动曝光算法个数
tSdkAeAlgorithm *pAeAlmHdDesc; // 硬件自动曝光算法描述为NULL表示不支持硬件自动曝光
int iAeAlmHdDesc; // 硬件自动曝光算法个数为0表示不支持硬件自动曝光
tSdkBayerDecodeAlgorithm *pBayerDecAlmSwDesc; // 软件Bayer转换为RGB数据的算法描述
int iBayerDecAlmSwDesc; // 软件Bayer转换为RGB数据的算法个数
tSdkBayerDecodeAlgorithm *pBayerDecAlmHdDesc; // 硬件Bayer转换为RGB数据的算法描述为NULL表示不支持
int iBayerDecAlmHdDesc; // 硬件Bayer转换为RGB数据的算法个数为0表示不支持
/* 图像参数的调节范围定义,用于动态构建UI*/
tSdkExpose sExposeDesc; // 曝光的范围值
tSdkResolutionRange sResolutionRange; // 分辨率范围描述
tRgbGainRange sRgbGainRange; // 图像数字增益范围描述
tSaturationRange sSaturationRange; // 饱和度范围描述
tGammaRange sGammaRange; // 伽马范围描述
tContrastRange sContrastRange; // 对比度范围描述
tSharpnessRange sSharpnessRange; // 锐化范围描述
tSdkIspCapacity sIspCapacity; // ISP能力描述
} tSdkCameraCapbility;
//图像帧头信息
typedef struct
{
UINT uiMediaType; // 图像格式,Image Format
UINT uBytes; // 图像数据字节数,Total bytes
INT iWidth; // 图像的宽度,调用图像处理函数后,该变量可能被动态修改,来指示处理后的图像尺寸
INT iHeight; // 图像的高度,调用图像处理函数后,该变量可能被动态修改,来指示处理后的图像尺寸
INT iWidthZoomSw; // 软件缩放的宽度,不需要进行软件裁剪的图像此变量设置为0.
INT iHeightZoomSw; // 软件缩放的高度,不需要进行软件裁剪的图像此变量设置为0.
BOOL bIsTrigger; // 指示是否为触发帧 is trigger
UINT uiTimeStamp; // 该帧的采集时间单位0.1毫秒
UINT uiExpTime; // 当前图像的曝光值单位为微秒us
float fAnalogGain; // 当前图像的模拟增益倍数
INT iGamma; // 该帧图像的伽马设定值仅当LUT模式为动态参数生成时有效其余模式下为-1
INT iContrast; // 该帧图像的对比度设定值仅当LUT模式为动态参数生成时有效其余模式下为-1
INT iSaturation; // 该帧图像的饱和度设定值对于黑白相机无意义为0
float fRgain; // 该帧图像处理的红色数字增益倍数对于黑白相机无意义为1
float fGgain; // 该帧图像处理的绿色数字增益倍数对于黑白相机无意义为1
float fBgain; // 该帧图像处理的蓝色数字增益倍数对于黑白相机无意义为1
}tSdkFrameHead;
// Image frame descriptor: header plus pixel data.
typedef struct sCameraFrame
{
    tSdkFrameHead head;    // frame header
    BYTE * pBuffer;        // pixel data area
}tSdkFrame;
// Callback type invoked for each captured image frame.
typedef void (*CAMERA_SNAP_PROC)(CameraHandle hCamera, BYTE *pFrameBuffer, tSdkFrameHead* pFrameHead,PVOID pContext);
// Callback type for messages from the SDK-generated camera configuration page.
typedef void (*CAMERA_PAGE_MSG_PROC)(CameraHandle hCamera,UINT MSG,UINT uParam,PVOID pContext);
//----------------------------IMAGE FORMAT DEFINE------------------------------------
//----------------------------图像格式定义-------------------------------------------
#define CAMERA_MEDIA_TYPE_MONO 0x01000000
#define CAMERA_MEDIA_TYPE_RGB 0x02000000
#define CAMERA_MEDIA_TYPE_COLOR 0x02000000
#define CAMERA_MEDIA_TYPE_CUSTOM 0x80000000
#define CAMERA_MEDIA_TYPE_COLOR_MASK 0xFF000000
#define CAMERA_MEDIA_TYPE_OCCUPY1BIT 0x00010000
#define CAMERA_MEDIA_TYPE_OCCUPY2BIT 0x00020000
#define CAMERA_MEDIA_TYPE_OCCUPY4BIT 0x00040000
#define CAMERA_MEDIA_TYPE_OCCUPY8BIT 0x00080000
#define CAMERA_MEDIA_TYPE_OCCUPY10BIT 0x000A0000
#define CAMERA_MEDIA_TYPE_OCCUPY12BIT 0x000C0000
#define CAMERA_MEDIA_TYPE_OCCUPY16BIT 0x00100000
#define CAMERA_MEDIA_TYPE_OCCUPY24BIT 0x00180000
#define CAMERA_MEDIA_TYPE_OCCUPY32BIT 0x00200000
#define CAMERA_MEDIA_TYPE_OCCUPY36BIT 0x00240000
#define CAMERA_MEDIA_TYPE_OCCUPY48BIT 0x00300000
#define CAMERA_MEDIA_TYPE_EFFECTIVE_PIXEL_SIZE_MASK 0x00FF0000
#define CAMERA_MEDIA_TYPE_EFFECTIVE_PIXEL_SIZE_SHIFT 16
#define CAMERA_MEDIA_TYPE_ID_MASK 0x0000FFFF
#define CAMERA_MEDIA_TYPE_COUNT 0x46
/*mono*/
#define CAMERA_MEDIA_TYPE_MONO1P (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY1BIT | 0x0037)
#define CAMERA_MEDIA_TYPE_MONO2P (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY2BIT | 0x0038)
#define CAMERA_MEDIA_TYPE_MONO4P (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY4BIT | 0x0039)
#define CAMERA_MEDIA_TYPE_MONO8 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY8BIT | 0x0001)
#define CAMERA_MEDIA_TYPE_MONO8S (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY8BIT | 0x0002)
#define CAMERA_MEDIA_TYPE_MONO10 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0003)
#define CAMERA_MEDIA_TYPE_MONO10_PACKED (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x0004)
#define CAMERA_MEDIA_TYPE_MONO12 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0005)
#define CAMERA_MEDIA_TYPE_MONO12_PACKED (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x0006)
#define CAMERA_MEDIA_TYPE_MONO14 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0025)
#define CAMERA_MEDIA_TYPE_MONO16 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0007)
/*Bayer */
#define CAMERA_MEDIA_TYPE_BAYGR8 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY8BIT | 0x0008)
#define CAMERA_MEDIA_TYPE_BAYRG8 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY8BIT | 0x0009)
#define CAMERA_MEDIA_TYPE_BAYGB8 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY8BIT | 0x000A)
#define CAMERA_MEDIA_TYPE_BAYBG8 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY8BIT | 0x000B)
#define CAMERA_MEDIA_TYPE_BAYGR10_MIPI (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY10BIT | 0x0026)
#define CAMERA_MEDIA_TYPE_BAYRG10_MIPI (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY10BIT | 0x0027)
#define CAMERA_MEDIA_TYPE_BAYGB10_MIPI (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY10BIT | 0x0028)
#define CAMERA_MEDIA_TYPE_BAYBG10_MIPI (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY10BIT | 0x0029)
#define CAMERA_MEDIA_TYPE_BAYGR10 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x000C)
#define CAMERA_MEDIA_TYPE_BAYRG10 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x000D)
#define CAMERA_MEDIA_TYPE_BAYGB10 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x000E)
#define CAMERA_MEDIA_TYPE_BAYBG10 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x000F)
#define CAMERA_MEDIA_TYPE_BAYGR12 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0010)
#define CAMERA_MEDIA_TYPE_BAYRG12 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0011)
#define CAMERA_MEDIA_TYPE_BAYGB12 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0012)
#define CAMERA_MEDIA_TYPE_BAYBG12 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0013)
#define CAMERA_MEDIA_TYPE_BAYGR10_PACKED (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x0026)
#define CAMERA_MEDIA_TYPE_BAYRG10_PACKED (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x0027)
#define CAMERA_MEDIA_TYPE_BAYGB10_PACKED (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x0028)
#define CAMERA_MEDIA_TYPE_BAYBG10_PACKED (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x0029)
#define CAMERA_MEDIA_TYPE_BAYGR12_PACKED (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x002A)
#define CAMERA_MEDIA_TYPE_BAYRG12_PACKED (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x002B)
#define CAMERA_MEDIA_TYPE_BAYGB12_PACKED (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x002C)
#define CAMERA_MEDIA_TYPE_BAYBG12_PACKED (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x002D)
#define CAMERA_MEDIA_TYPE_BAYGR16 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x002E)
#define CAMERA_MEDIA_TYPE_BAYRG16 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x002F)
#define CAMERA_MEDIA_TYPE_BAYGB16 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0030)
#define CAMERA_MEDIA_TYPE_BAYBG16 (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0031)
/*RGB */
#define CAMERA_MEDIA_TYPE_RGB8 (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY24BIT | 0x0014)
#define CAMERA_MEDIA_TYPE_BGR8 (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY24BIT | 0x0015)
#define CAMERA_MEDIA_TYPE_RGBA8 (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY32BIT | 0x0016)
#define CAMERA_MEDIA_TYPE_BGRA8 (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY32BIT | 0x0017)
#define CAMERA_MEDIA_TYPE_RGB10 (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY48BIT | 0x0018)
#define CAMERA_MEDIA_TYPE_BGR10 (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY48BIT | 0x0019)
#define CAMERA_MEDIA_TYPE_RGB12 (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY48BIT | 0x001A)
#define CAMERA_MEDIA_TYPE_BGR12 (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY48BIT | 0x001B)
#define CAMERA_MEDIA_TYPE_RGB16 (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY48BIT | 0x0033)
#define CAMERA_MEDIA_TYPE_RGB10V1_PACKED (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY32BIT | 0x001C)
#define CAMERA_MEDIA_TYPE_RGB10P32 (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY32BIT | 0x001D)
#define CAMERA_MEDIA_TYPE_RGB12V1_PACKED (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY36BIT | 0X0034)
#define CAMERA_MEDIA_TYPE_RGB565P (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0035)
#define CAMERA_MEDIA_TYPE_BGR565P (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0X0036)
/*YUV and YCbCr*/
#define CAMERA_MEDIA_TYPE_YUV411_8_UYYVYY (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x001E)
#define CAMERA_MEDIA_TYPE_YUV422_8_UYVY (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x001F)
#define CAMERA_MEDIA_TYPE_YUV422_8 (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0032)
#define CAMERA_MEDIA_TYPE_YUV8_UYV (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY24BIT | 0x0020)
#define CAMERA_MEDIA_TYPE_YCBCR8_CBYCR (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY24BIT | 0x003A)
//CAMERA_MEDIA_TYPE_YCBCR422_8 : YYYYCbCrCbCr
#define CAMERA_MEDIA_TYPE_YCBCR422_8 (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x003B)
#define CAMERA_MEDIA_TYPE_YCBCR422_8_CBYCRY (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0043)
#define CAMERA_MEDIA_TYPE_YCBCR411_8_CBYYCRYY (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x003C)
#define CAMERA_MEDIA_TYPE_YCBCR601_8_CBYCR (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY24BIT | 0x003D)
#define CAMERA_MEDIA_TYPE_YCBCR601_422_8 (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x003E)
#define CAMERA_MEDIA_TYPE_YCBCR601_422_8_CBYCRY (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0044)
#define CAMERA_MEDIA_TYPE_YCBCR601_411_8_CBYYCRYY (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x003F)
#define CAMERA_MEDIA_TYPE_YCBCR709_8_CBYCR (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY24BIT | 0x0040)
#define CAMERA_MEDIA_TYPE_YCBCR709_422_8 (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0041)
#define CAMERA_MEDIA_TYPE_YCBCR709_422_8_CBYCRY (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0045)
#define CAMERA_MEDIA_TYPE_YCBCR709_411_8_CBYYCRYY (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x0042)
/*RGB Planar */
#define CAMERA_MEDIA_TYPE_RGB8_PLANAR (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY24BIT | 0x0021)
#define CAMERA_MEDIA_TYPE_RGB10_PLANAR (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY48BIT | 0x0022)
#define CAMERA_MEDIA_TYPE_RGB12_PLANAR (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY48BIT | 0x0023)
#define CAMERA_MEDIA_TYPE_RGB16_PLANAR (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY48BIT | 0x0024)
#endif

View File

@@ -0,0 +1,102 @@
#ifndef __CAMERA_STATUS_DEF__
#define __CAMERA_STATUS_DEF__
typedef int CameraSdkStatus;
/*常用的宏*/
/* Convenience macros for checking SDK status codes.
 * Every macro argument is fully parenthesized so that compound expressions
 * such as SDK_SUCCESS(a ? b : c) evaluate correctly (the original expanded
 * the bare argument next to ==/!=, mis-parsing low-precedence expressions). */
#define SDK_SUCCESS(_FUC_) ((_FUC_) == CAMERA_STATUS_SUCCESS)
#define SDK_UNSUCCESS(_FUC_) ((_FUC_) != CAMERA_STATUS_SUCCESS)
/* Evaluate _FUC_, store its status into RET, and return RET on failure. */
#define SDK_UNSUCCESS_RETURN(_FUC_,RET) if((RET = (_FUC_)) != CAMERA_STATUS_SUCCESS)\
{\
    return RET;\
}
/* Evaluate _FUC_ and break out of the enclosing loop on failure. */
#define SDK_UNSUCCESS_BREAK(_FUC_) if((_FUC_) != CAMERA_STATUS_SUCCESS)\
{\
    break;\
}
/* Common errors */
#define CAMERA_STATUS_SUCCESS 0 // operation succeeded
#define CAMERA_STATUS_FAILED (-1) // operation failed
#define CAMERA_STATUS_INTERNAL_ERROR -2 // 内部错误
#define CAMERA_STATUS_UNKNOW -3 // 未知错误
#define CAMERA_STATUS_NOT_SUPPORTED -4 // 不支持该功能
#define CAMERA_STATUS_NOT_INITIALIZED -5 // 初始化未完成
#define CAMERA_STATUS_PARAMETER_INVALID -6 // 参数无效
#define CAMERA_STATUS_PARAMETER_OUT_OF_BOUND -7 // 参数越界
#define CAMERA_STATUS_UNENABLED -8 // 未使能
#define CAMERA_STATUS_USER_CANCEL -9 // 用户手动取消了比如roi面板点击取消返回
#define CAMERA_STATUS_PATH_NOT_FOUND -10 // 注册表中没有找到对应的路径
#define CAMERA_STATUS_SIZE_DISMATCH -11 // 获得图像数据长度和定义的尺寸不匹配
#define CAMERA_STATUS_TIME_OUT -12 // 超时错误
#define CAMERA_STATUS_IO_ERROR -13 // 硬件IO错误
#define CAMERA_STATUS_COMM_ERROR -14 // 通讯错误
#define CAMERA_STATUS_BUS_ERROR -15 // 总线错误
#define CAMERA_STATUS_NO_DEVICE_FOUND -16 // 没有发现设备
#define CAMERA_STATUS_NO_LOGIC_DEVICE_FOUND -17 // 未找到逻辑设备
#define CAMERA_STATUS_DEVICE_IS_OPENED -18 // 设备已经打开
#define CAMERA_STATUS_DEVICE_IS_CLOSED -19 // 设备已经关闭
#define CAMERA_STATUS_DEVICE_VEDIO_CLOSED -20 // 没有打开设备视频,调用录像相关的函数时,如果相机视频没有打开,则回返回该错误。
#define CAMERA_STATUS_NO_MEMORY -21 // 没有足够系统内存
#define CAMERA_STATUS_FILE_CREATE_FAILED -22 // 创建文件失败
#define CAMERA_STATUS_FILE_INVALID -23 // 文件格式无效
#define CAMERA_STATUS_WRITE_PROTECTED -24 // 写保护,不可写
#define CAMERA_STATUS_GRAB_FAILED -25 // 数据采集失败
#define CAMERA_STATUS_LOST_DATA -26 // 数据丢失,不完整
#define CAMERA_STATUS_EOF_ERROR -27 // 未接收到帧结束符
#define CAMERA_STATUS_BUSY -28 // 正忙(上一次操作还在进行中),此次操作不能进行
#define CAMERA_STATUS_WAIT -29 // 需要等待(进行操作的条件不成立),可以再次尝试
#define CAMERA_STATUS_IN_PROCESS -30 // 正在进行,已经被操作过
#define CAMERA_STATUS_IIC_ERROR -31 // IIC传输错误
#define CAMERA_STATUS_SPI_ERROR -32 // SPI传输错误
#define CAMERA_STATUS_USB_CONTROL_ERROR -33 // USB控制传输错误
#define CAMERA_STATUS_USB_BULK_ERROR -34 // USB BULK传输错误
#define CAMERA_STATUS_SOCKET_INIT_ERROR -35 // 网络传输套件初始化失败
#define CAMERA_STATUS_GIGE_FILTER_INIT_ERROR -36 // 网络相机内核过滤驱动初始化失败,请检查是否正确安装了驱动,或者重新安装。
#define CAMERA_STATUS_NET_SEND_ERROR -37 // 网络数据发送错误
#define CAMERA_STATUS_DEVICE_LOST -38 // 与网络相机失去连接,心跳检测超时
#define CAMERA_STATUS_DATA_RECV_LESS -39 // 接收到的字节数比请求的少
#define CAMERA_STATUS_FUNCTION_LOAD_FAILED -40 // 从文件中加载程序失败
#define CAMERA_STATUS_CRITICAL_FILE_LOST -41 // 程序运行所必须的文件丢失。
#define CAMERA_STATUS_SENSOR_ID_DISMATCH -42 // 固件和程序不匹配,原因是下载了错误的固件。
#define CAMERA_STATUS_OUT_OF_RANGE -43 // 参数超出有效范围。
#define CAMERA_STATUS_REGISTRY_ERROR -44 // 安装程序注册错误。请重新安装程序或者运行安装目录Setup/Installer.exe
#define CAMERA_STATUS_ACCESS_DENY -45 // 禁止访问。指定相机已经被其他程序占用时,再申请访问该相机,会返回该状态。(一个相机不能被多个程序同时访问)
#define CAMERA_STATUS_CAMERA_NEED_RESET -46 // 表示相机需要复位后才能正常使用,此时请让相机断电重启,或者重启操作系统后,便可正常使用。
//和AIA制定的标准相同
/*#define CAMERA_AIA_SUCCESS 0x0000 */
#define CAMERA_AIA_PACKET_RESEND 0x0100 //该帧需要重传
#define CAMERA_AIA_NOT_IMPLEMENTED 0x8001 //设备不支持的命令
#define CAMERA_AIA_INVALID_PARAMETER 0x8002 //命令参数非法
#define CAMERA_AIA_INVALID_ADDRESS 0x8003 //不可访问的地址
#define CAMERA_AIA_WRITE_PROTECT 0x8004 //访问的对象不可写
#define CAMERA_AIA_BAD_ALIGNMENT 0x8005 //访问的地址没有按照要求对齐
#define CAMERA_AIA_ACCESS_DENIED 0x8006 //没有访问权限
#define CAMERA_AIA_BUSY 0x8007 //命令正在处理中
#define CAMERA_AIA_DEPRECATED 0x8008 //0x8008-0x0800B 0x800F 该指令已经废弃
#define CAMERA_AIA_PACKET_UNAVAILABLE 0x800C //包无效
#define CAMERA_AIA_DATA_OVERRUN 0x800D //数据溢出,通常是收到的数据比需要的多
#define CAMERA_AIA_INVALID_HEADER 0x800E //数据包头部中某些区域与协议不匹配
#define CAMERA_AIA_PACKET_NOT_YET_AVAILABLE 0x8010 //图像分包数据还未准备好,多用于触发模式,应用程序访问超时
#define CAMERA_AIA_PACKET_AND_PREV_REMOVED_FROM_MEMORY 0x8011 //需要访问的分包已经不存在。多用于重传时数据已经不在缓冲区中
#define CAMERA_AIA_PACKET_REMOVED_FROM_MEMORY 0x8012 //CAMERA_AIA_PACKET_AND_PREV_REMOVED_FROM_MEMORY
#define CAMERA_AIA_NO_REF_TIME 0x0813 //没有参考时钟源。多用于时间同步的命令执行时
#define CAMERA_AIA_PACKET_TEMPORARILY_UNAVAILABLE 0x0814 //由于信道带宽问题,当前分包暂时不可用,需稍后进行访问
#define CAMERA_AIA_OVERFLOW 0x0815 //设备端数据溢出,通常是队列已满
#define CAMERA_AIA_ACTION_LATE 0x0816 //命令执行已经超过有效的指定时间
#define CAMERA_AIA_ERROR 0x8FFF //错误
#endif

View File

@@ -0,0 +1,60 @@
//
// Created by zhikun on 18-11-7.
// used for testing double cameras
// camera0 is left camera, camera1 is right camera.
//
#ifndef VIDEO_TEST1_CAMERA_WRAPPER_H
#define VIDEO_TEST1_CAMERA_WRAPPER_H
#include <stdio.h>
#include <iostream>
#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
#include <opencv2/imgproc/imgproc.hpp>
#include "camera/wrapper_head.h"
#include "camera/camera_api.h"
/**
 * @brief Stereo source backed by two MindVision industrial cameras.
 *
 * Camera 0 is the left camera, camera 1 is the right camera. Frames can be
 * read either raw (Bayer, fast) or ISP-processed (color, slower).
 */
class CameraWrapper: public WrapperHead {
private:
    unsigned char* rgb_buffer0;       // ISP output buffer for camera 0 (malloc'd in init)
    unsigned char* rgb_buffer1;       // ISP output buffer for camera 1 (malloc'd in init)
    int camera_cnts;                  // number of cameras found by enumeration
    int camera_status0, camera_status1; // CameraInit() status codes (see camera_status.h)
    tSdkCameraDevInfo camera_enum_list[2]; // enumeration results for both devices
    int h_camera0;                    // SDK handle of camera 0
    int h_camera1;                    // SDK handle of camera 1
    char camera_name0[32];            // friendly name of camera 0
    char camera_name1[32];            // friendly name of camera 1
    tSdkCameraCapbility tCapability0; // capability descriptor of camera 0
    tSdkCameraCapbility tCapability1; // capability descriptor of camera 1
    tSdkFrameHead frame_info0;        // header of the last frame from camera 0
    tSdkFrameHead frame_info1;        // header of the last frame from camera 1
    BYTE *pby_buffer0;                // SDK-owned raw buffer of the last frame (camera 0)
    BYTE *pby_buffer1;                // SDK-owned raw buffer of the last frame (camera 1)
    IplImage* iplImage0;              // header wrapping the camera-0 buffer (no pixel copy)
    IplImage* iplImage1;              // header wrapping the camera-1 buffer (no pixel copy)
    int channel0;                     // channels of camera 0 output (1 mono / 3 color)
    int channel1;                     // channels of camera 1 output (1 mono / 3 color)
    // Exchange the two camera handles (used when enumeration order is swapped).
    void swapCameraHandle();
public:
    CameraWrapper();
    ~CameraWrapper() final;
    // Enumerate and configure both cameras; false unless exactly two are ready.
    bool init() final;
    // Read one frame pair; currently forwards to readRaw().
    bool read(cv::Mat& src0, cv::Mat& src1) final;
    // Read raw 8-bit (Bayer/mono) frames without ISP processing.
    bool readRaw(cv::Mat& src0, cv::Mat& src1);
    // Read ISP-processed frames (color); slower than readRaw().
    bool readProcessed(cv::Mat& src0, cv::Mat& src1);
};
#endif //VIDEO_TEST1_CAMERA_WRAPPER_H

View File

@@ -0,0 +1,43 @@
//
// Created by zhikun on 18-11-16.
// wrapper for video read from file
//
#ifndef STEREOVISION_FROM_VIDEO_FILE_VIDEO_WRAPPER_H
#define STEREOVISION_FROM_VIDEO_FILE_VIDEO_WRAPPER_H
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include "wrapper_head.h"
/**
 * @brief Stereo source backed by two pre-recorded video files.
 */
class VideoWrapper:public WrapperHead {
public:
    // Open the two video files; check the result with init().
    VideoWrapper(const std::string& filename0, const std::string& filename1);
    ~VideoWrapper();
    /**
     * @brief initialize cameras
     * @return bool value: whether it success
     */
    bool init() final;
    /**
     * @brief read images from camera
     * @param src_left : output source video of left camera
     * @param src_right : output source video of right camera
     * @return bool value: whether the reading is successful
     */
    bool read(cv::Mat &src_left, cv::Mat &src_right) final;
private:
    cv::VideoCapture video0, video1;  // left (0) and right (1) video streams
};
#endif //STEREOVISION_FROM_VIDEO_FILE_VIDEO_WRAPPER_H

View File

@@ -0,0 +1,24 @@
//
// Created by zhikun on 18-11-18.
//
#ifndef STEREOVISION_FROM_VIDEO_FILE_WRAPPER_HEAD_H
#define STEREOVISION_FROM_VIDEO_FILE_WRAPPER_HEAD_H
#include <opencv2/core/core.hpp>
/**
* @brief A virtual class for wrapper of camera and video files
*/
/**
 * @brief Abstract interface for stereo image sources (cameras or video files).
 *
 * Implementations provide initialization and per-frame reading of a
 * left/right image pair.
 */
class WrapperHead {
public:
    virtual ~WrapperHead() = default;  // fixed stray double semicolon
    /// @brief Initialize the source. @return true on success.
    virtual bool init() = 0;
    /// @brief Read one frame pair. @return true if both images were read.
    virtual bool read(cv::Mat &src_left, cv::Mat &src_right) = 0;
};
#endif //STEREOVISION_FROM_VIDEO_FILE_WRAPPER_HEAD_H

138
include/log.h Normal file
View File

@@ -0,0 +1,138 @@
//
// Created by xinyang on 19-2-19.
//
#ifndef _LOG_H_
#define _LOG_H_
#include <stdio.h>
#include <type_traits>
#include <sys/time.h>
/************** Define the control code *************/
#define START_CTR "\033[0"
#define END_CTR "m"
#define CLEAR_CODE ";0"
#define LIGHT_CODE ";1"
#define LINE_CODE ";4"
#define BLINK_CODE ";5"
#define REVERSE_CODE ";7"
#define VANISH_CODE ";8"
#define WORD_WHITE_CODE ";30"
#define WORD_RED_CODE ";31"
#define WORD_GREEN_CODE ";32"
#define WORD_YELLOW_CODE ";33"
#define WORD_BLUE_CODE ";34"
#define WORD_PURPLE_CODE ";35"
#define WORD_CYAN_CODE ";36"
#define WORD_GRAY_CODE ";37"
#define BACK_WHITE_CODE ";40"
#define BACK_RED_CODE ";41"
#define BACK_GREEN_CODE ";42"
#define BACK_YELLOW_CODE ";43"
#define BACK_BLUE_CODE ";44"
#define BACK_PURPLE_CODE ";45"
#define BACK_CYAN_CODE ";46"
#define BACK_GRAY_CODE ";47"
#define CTRS(ctrs) START_CTR ctrs END_CTR
#define WORD_WHITE WORD_WHITE_CODE
#define WORD_RED WORD_RED_CODE
#define WORD_GREEN WORD_GREEN_CODE
#define WORD_YELLOW WORD_YELLOW_CODE
#define WORD_BLUE WORD_BLUE_CODE
#define WORD_PURPLE WORD_PURPLE_CODE
#define WORD_CYAN WORD_CYAN_CODE
#define WORD_GRAY WORD_GRAY_CODE
#define WORD_LIGHT_WHITE LIGHT_CODE WORD_WHITE
#define WORD_LIGHT_RED LIGHT_CODE WORD_RED
#define WORD_LIGHT_GREEN LIGHT_CODE WORD_GREEN
#define WORD_LIGHT_YELLOW LIGHT_CODE WORD_YELLOW
#define WORD_LIGHT_BLUE LIGHT_CODE WORD_BLUE
#define WORD_LIGHT_PURPLE LIGHT_CODE WORD_PURPLE
#define WORD_LIGHT_CYAN LIGHT_CODE WORD_CYAN
#define WORD_LIGHT_GRAY LIGHT_CODE WORD_GRAY
#define CLEAR_ALL CTRS(CLEAR_CODE)
/*************** Define the log level value ***************/
#define LOG_NONE 0
#define LOG_ERROR 1
#define LOG_WARRING 2
#define LOG_MSG 3
/************** Ensure the current log level **************/
#ifndef LOG_LEVEL
#define LOG_LEVEL LOG_MSG
#endif
#if LOG_LEVEL < LOG_NONE
#define LOG_LEVEL LOG_NONE
#elif LOG_LEVEL > LOG_MSG
#define LOG_LEVEL LOG_MSG
#endif
/******* Ensure the color corresponding to the level ******/
#ifndef LOG_ERROR_COLOR
#define LOG_ERROR_COLOR WORD_RED
#endif
#ifndef LOG_WARRING_COLOR
#define LOG_WARRING_COLOR WORD_YELLOW
#endif
#ifndef LOG_MSG_COLOR
#define LOG_MSG_COLOR WORD_GRAY
#endif
#ifndef LOG_LINK_COLOR
#define LOG_LINK_COLOR LINE_CODE WORD_BLUE
#endif
/******************** The log API *************************/
#define LOG_0(format, ...) printf(format, ##__VA_ARGS__)
#if LOG_LEVEL >= LOG_ERROR
#define LOG_1(format, ...) printf(format, ##__VA_ARGS__)
#else
#define LOG_1(format, ...) ((void)0)
#endif
#if LOG_LEVEL >= LOG_WARRING
#define LOG_2(format, ...) printf(format, ##__VA_ARGS__)
#else
#define LOG_2(format, ...) ((void)0)
#endif
#if LOG_LEVEL >= LOG_MSG
#define LOG_3(format, ...) printf(format, ##__VA_ARGS__)
#else
#define LOG_3(format, ...) ((void)0)
#endif
/* Core dispatch: LOG expands `level` (LOG_NONE..LOG_MSG) to its numeric
 * value, then LOG_ token-pastes it onto LOG_ to select LOG_0..LOG_3.
 * LOG also appends a newline to the format string. */
#define LOG_(level, format, ...) LOG_##level (format, ##__VA_ARGS__)
#define LOG(level, format, ...) LOG_(level, format"\n", ##__VA_ARGS__)
/* Wrap `str` in the given terminal control codes and reset afterwards. */
#define STR_CTR(ctrs, str) START_CTR ctrs END_CTR str CLEAR_ALL
#define LOGA(format, ...) LOG(LOG_NONE, format, ##__VA_ARGS__)
/* BUG FIX: the "<%s:%d>: " prefix expects file/line arguments, but they were
 * never passed, so printf read garbage varargs (undefined behavior). */
#define LOGA_INFO(format, ...) LOG(LOG_NONE, "<%s:%d>: " format, __FILE__, __LINE__, ##__VA_ARGS__)
#define LOGE(format, ...) LOG(LOG_ERROR, STR_CTR(LOG_ERROR_COLOR, "<ERROR>: " format), ## __VA_ARGS__)
#define LOGW(format, ...) LOG(LOG_WARRING, STR_CTR(LOG_WARRING_COLOR,"<WARRING>: " format), ## __VA_ARGS__)
#define LOGM(format, ...) LOG(LOG_MSG, STR_CTR(LOG_MSG_COLOR, "<MSG>: " format), ## __VA_ARGS__)
#define LOGE_INFO(format, ...) LOG(LOG_ERROR, \
STR_CTR(LOG_ERROR_COLOR, "<") \
STR_CTR(LOG_LINK_COLOR, "%s:%d") \
STR_CTR(LOG_ERROR_COLOR, " ERROR>: " format), \
__FILE__, __LINE__, ##__VA_ARGS__)
#define LOGW_INFO(format, ...) LOG(LOG_WARRING, \
STR_CTR(LOG_WARRING_COLOR,"<") \
STR_CTR(LOG_LINK_COLOR,"%s:%d") \
STR_CTR(LOG_WARRING_COLOR, " WARRING>: " format), \
__FILE__, __LINE__, ##__VA_ARGS__)
#define LOGM_INFO(format, ...) LOG(LOG_MSG, \
STR_CTR(LOG_MSG_COLOR, "<") \
STR_CTR(LOG_LINK_COLOR, "%s:%d") \
STR_CTR(LOG_MSG_COLOR, " MSG>: " format), \
__FILE__, __LINE__, ##__VA_ARGS__)
/******************** the time counter API ************************/
#if LOG_LEVEL > LOG_NONE && (!defined(DO_NOT_CNT_TIME))
#define CNT_TIME(str_ctrs, tag, codes, ...) do{\
timeval ts, te; \
gettimeofday(&ts, NULL); \
codes; \
gettimeofday(&te, NULL); \
LOGM(STR_CTR(str_ctrs, tag": %fms"), ## __VA_ARGS__, (te.tv_sec-ts.tv_sec)*1000.0 + (te.tv_usec-ts.tv_usec)/1000.0); \
}while(0)
#else
#define CNT_TIME(str_ctrs, tag, codes, ...) codes
#endif
#endif /* _LOG_H_ */

View File

@@ -0,0 +1,17 @@
//
// Created by xinyang on 19-4-7.
//
#ifndef _ADDITIONS_H_
#define _ADDITIONS_H_
#include <uart/uart.h>
#include <opencv2/core.hpp>
#include <thread>
// Start a thread that receives UART data; the caller owns the returned
// thread object. (Implementation not visible in this chunk -- see additions.cpp.)
std::thread* create_data_recv_thread(Uart *uart);
// Append one frame to an auto-numbered video file (writer created lazily).
void save_video_file(cv::Mat &src);
// Save src as a labelled training sample together with its bounding box.
void save_labelled_image(cv::Mat &src, cv::Rect2d box);
#endif /* _ADDITIONS_H_ */

19
include/options/options.h Normal file
View File

@@ -0,0 +1,19 @@
//
// Created by xinyang on 19-3-27.
//
#ifndef _OPTIONS_H_
#define _OPTIONS_H_
// Runtime feature flags parsed from the command line (defined in options.cpp).
extern bool show_armor_box;    // draw the final aim box
extern bool show_armor_boxes;  // draw all candidate armor boxes
extern bool show_light_blobs;  // draw candidate light blobs
extern bool show_origin;       // show the unprocessed source image
extern bool save_labelled;     // save labelled samples when a box is found
extern bool run_with_camera;   // NOTE(review): handler not visible here -- confirm semantics
extern bool save_video;        // record frames to video
extern bool collect_data;      // NOTE(review): handler not visible here -- confirm semantics
// Parse argv and set the flags above.
void process_options(int argc, char *argv[]);
#endif /* _OPTIONS_H_ */

42
include/uart/uart.h Normal file
View File

@@ -0,0 +1,42 @@
//
// Created by xixiliadorabarry on 1/24/19.
//
#ifndef STEREOVISION_UART_H
#define STEREOVISION_UART_H
#include <stdio.h>
#include <time.h>
#include <iostream>
#include <errno.h>
#include <fcntl.h>
#include <stdint.h>
#include <string.h>
#include <termios.h>
#include <unistd.h>
#include <time.h>
//#include <opencv2/core/core.hpp>
/**
 * @brief Serial-port link used to exchange targeting data with the controller.
 */
class Uart {
private:
    int fd;            // file descriptor of the opened serial device
    char buff[8];      // scratch buffer for outgoing frames
    int fps;           // counter paired with cur_time -- presumably for send-rate tracking; confirm in uart.cpp
    time_t cur_time;   // timestamp used together with fps
    // Configure the port (baud rate, data bits, parity event, stop bits) on fd.
    int set_opt(int fd, int nSpeed, int nBits, char nEvent, int nStop);
public:
    Uart();
    // Send an aim target position (x, y, z) to the controller.
    void sendTarget(float x, float y ,float z);
    // Read a single byte from the port (used by the receive thread).
    char receive();
    // Read an initial data block; called once from main() in energy mode.
    void receive_data();
};
#endif //STEREOVISION_UART_H

BIN
libMVSDK.so Normal file

Binary file not shown.

118
main.cpp Normal file
View File

@@ -0,0 +1,118 @@
//
// Created by xixiliadorabarry on 1/24/19.
//
#include <fstream>
#include <iostream>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include "energy/energy.h"
#include "include/uart/uart.h"
#include "energy/param_struct_define.h"
#include "energy/constant.h"
#include "camera/camera_wrapper.h"
#include "camera/video_wrapper.h"
#include "camera/wrapper_head.h"
#include "armor_finder/armor_finder.h"
#include <time.h>
#include <thread>
using namespace cv;
using namespace std;
#define ENERGY_STATE 1
#define ARMOR_STATE 0
int state = ARMOR_STATE;
float yaw, pitch;
void uartReceive(Uart* uart);
int main()
{
Uart uart;
bool flag = true;
short done = 0;//用于检测是否已经读完初始激光中心时的角度
while (flag)
{
int ally_color = ALLY_RED;
int energy_part_rotation = CLOCKWISE;
int from_camera = 1;
cout<<"Input 1 for camera, 0 for video files"<<endl;
cin>>from_camera;
WrapperHead *video;
if(from_camera)
video = new CameraWrapper;
else
video = new VideoWrapper("r_l_640.avi", "fan_640.avi");
if (video->init()) {
cout << "Video source initialization successfully." << endl;
}
Mat src, src_none;
ArmorFinder armorFinder(ENEMY_BLUE, uart);
Energy energy(uart);
energy.setAllyColor(ally_color);
energy.setRotation(energy_part_rotation);
static thread receive(uartReceive, &uart);
if(state==1 && done == 0){
energy.uart.receive_data();
done = 1;
}
// energy.sendTargetByUart(-8,-8,-8);
time_t t1 = time(nullptr), t2 = time(nullptr);
while (video->read(src, src_none))
{
// if(!from_camera)energy.extract(src);
if(state == 1){
imshow("src", src);
energy.run(src);
}else{
armorFinder.run(src_none);
}
if (waitKey(10) == 'q') {
flag = false;
break;
}
}
delete video;
cout << "Program fails. Restarting" << endl;
}
return 0;
}
/**
 * @brief UART receive thread: parses newline-terminated messages forever.
 *
 * Single-character messages switch the global mode ('e' -> energy,
 * 'a' -> armor); anything else is parsed as "yaw, pitch" floats into the
 * global `yaw`/`pitch`.
 * @param uart serial port to read from (blocking reads).
 */
void uartReceive(Uart* uart){
    char buffer[100];
    int cnt = 0;
    while (true) {
        char data;
        // Accumulate one '\n'-terminated message. BUG FIX: the index was
        // unbounded, so a long or garbled message overflowed the stack
        // buffer; excess bytes are now dropped.
        while ((data = uart->receive()) != '\n') {
            if (cnt < (int)sizeof(buffer) - 1) {
                buffer[cnt++] = data;
            }
        }
        buffer[cnt] = 0;
        if (cnt == 1 && buffer[0] == 'e') {
            state = ENERGY_STATE;
        } else if (cnt == 1 && buffer[0] == 'a') {
            state = ARMOR_STATE;
        } else {
            sscanf(buffer, "%f, %f", &yaw, &pitch);
        }
        cnt = 0;
    }
}

11
monitor.sh Executable file
View File

@@ -0,0 +1,11 @@
#!/bin/sh
# Watchdog: restart the given executable whenever it stops running.
# Usage: monitor.sh /path/to/executable
exe="$1"
while true; do
    # Look for a running instance, excluding the grep itself and this script.
    state=`ps aux | grep "$exe" | grep -v grep | grep -v "$0"`
    if [ ! "$state" ]; then
        # Launch in the background so the watchdog keeps running
        # (the original `exec $exe &` only exec'd a background subshell).
        "$exe" &
        echo "restart $exe"
    fi
    sleep 2
done

View File

@@ -0,0 +1,230 @@
//
// Created by zhikun on 18-11-7.
//
#include <camera/camera_wrapper.h>
using std::cout;
using std::endl;
using namespace cv;
/**
 * @brief Construct the wrapper with safe defaults.
 *
 * All owned pointers and handles are initialized so that the destructor can
 * run safely even when init() was never called or failed early.
 */
CameraWrapper::CameraWrapper()
{
    camera_cnts = 2;
    camera_status0 = -1;
    camera_status1 = -1;
    iplImage0 = nullptr;
    iplImage1 = nullptr;
    channel0 = 3;
    channel1 = 3;
    // BUG FIX: these were left uninitialized; the destructor frees the rgb
    // buffers, so destroying a never-initialized wrapper was undefined
    // behavior. free(nullptr) is a safe no-op.
    rgb_buffer0 = nullptr;
    rgb_buffer1 = nullptr;
    pby_buffer0 = nullptr;
    pby_buffer1 = nullptr;
    h_camera0 = -1;
    h_camera1 = -1;
}
/**
 * @brief Enumerate, open and configure both industrial cameras.
 *
 * Requires exactly two connected cameras. Enables auto exposure and
 * anti-flicker, allocates the ISP output buffers, starts streaming, and
 * selects MONO8 or BGR8 ISP output per sensor type.
 * @return true when both cameras are ready; false on any failure.
 */
bool CameraWrapper::init() {
    CameraSdkInit(1);
    // Enumerate devices and build the device list.
    int camera_enumerate_device_status = CameraEnumerateDevice(camera_enum_list, &camera_cnts);
    //cout<<"camera enumerate device status: "<<camera_enumerate_device_status<<endl;
    //cout<<"camera number: "<<camera_cnts<<endl;
    // No device connected.
    if (camera_cnts == 0) {
        cout<<"No device detected!"<<endl;
        return false;
    }
    else if(camera_cnts == 1)
    {
        cout<<"Only one camera device detected"<<endl;
        return false;
    }
    else if(camera_cnts == 2)
    {
        cout<<"Two camera devices detected."<<endl;
    }
    else
    {
        cout<<"More than 2 cameras detected or some other error occurs."<<endl;
        return false;
    }
    // Initialize the cameras. Only after successful initialization may any
    // other camera-related API be called.
    camera_status0 = CameraInit(&camera_enum_list[0], -1, -1, &h_camera0);
    // Initialization failed.
    if (camera_status0 != CAMERA_STATUS_SUCCESS) {
        cout<<"Camera 0 initialization failed with code "<<camera_status0<<". See camera_status.h to find the code meaning."<<endl;
        return false;
    }
    camera_status1 = CameraInit(&camera_enum_list[1], -1, -1, &h_camera1);
    if (camera_status1 != CAMERA_STATUS_SUCCESS) {
        cout<<"Camera 1 initialization failed with code "<<camera_status1<<". See camera_status.h to find the code meaning."<<endl;
        // NOTE(review): camera 0 is not uninitialized on this path --
        // confirm whether CameraUnInit(h_camera0) should be called here.
        return false;
    }
    CameraGetFriendlyName(h_camera0, camera_name0);
    CameraGetFriendlyName(h_camera1, camera_name1);
    cout<<"camera names: "<<camera_name0<<" "<<camera_name1<<endl;
    // cout<<camera_name0<<endl;
    // cout<<camera_name1<<endl;
    // If the enumerated order is not "0 on the left, 1 on the right",
    // swap the camera handles.
    // if(strcmp(camera_name0, "camera0") != 0)
    // {
    //     swapCameraHandle();
    // }
    // Fetch the capability descriptors. They contain the valid ranges of all
    // configurable parameters and determine the arguments of related calls.
    CameraGetCapability(h_camera0, &tCapability0);
    CameraGetCapability(h_camera1, &tCapability1);
    // set resolution to 320*240
    // CameraSetImageResolution(hCamera, &(tCapability.pImageSizeDesc[2]));
    rgb_buffer0 = (unsigned char *)malloc(tCapability0.sResolutionRange.iHeightMax *
                                          tCapability0.sResolutionRange.iWidthMax * 3);
    rgb_buffer1 = (unsigned char *)malloc(tCapability1.sResolutionRange.iHeightMax *
                                          tCapability1.sResolutionRange.iWidthMax * 3);
    CameraSetAeState(h_camera0, true); // enable/disable auto exposure
    CameraSetAeState(h_camera1, true);
    /* Put the SDK into working mode so it starts receiving image data from
       the cameras. In trigger mode the image is only updated after a trigger
       frame is received. */
    CameraPlay(h_camera0);
    CameraPlay(h_camera1);
    /* Other camera parameter setters/getters, for example:
       CameraSetExposureTime / CameraGetExposureTime   set/get exposure time
       CameraSetImageResolution / CameraGetImageResolution   set/get resolution
       CameraSetGamma, CameraSetConrast, CameraSetGain etc.  gamma/contrast/RGB digital gain
       CameraGetFriendlyName / CameraSetFriendlyName   get/set camera name (writable to hardware)
    */
    // double exposure_time0, exposure_time1;
    // CameraGetExposureTime(h_camera0, &exposure_time0);
    // CameraGetExposureTime(h_camera1, &exposure_time1);
    // cout<<"exposure time "<<exposure_time0<<" "<<exposure_time1<<endl;
    // Anti-flicker.
    CameraSetAntiFlick(h_camera0, true);
    CameraSetAntiFlick(h_camera1, true);
    if (tCapability0.sIspCapacity.bMonoSensor) {
        channel0 = 1;
        CameraSetIspOutFormat(h_camera0, CAMERA_MEDIA_TYPE_MONO8);
        cout<<"camera0 mono "<<endl;
    } else {
        channel0 = 3;
        CameraSetIspOutFormat(h_camera0, CAMERA_MEDIA_TYPE_BGR8);
        cout<<"camera0 color mode"<<endl;
    }
    if (tCapability1.sIspCapacity.bMonoSensor) {
        channel1 = 1;
        CameraSetIspOutFormat(h_camera1, CAMERA_MEDIA_TYPE_MONO8);
        cout<<"camera1 mono "<<endl;
    } else {
        channel1 = 3;
        CameraSetIspOutFormat(h_camera1, CAMERA_MEDIA_TYPE_BGR8);
        cout<<"camera1 color mode"<<endl;
    }
    return true;
}
// Read one frame pair. Forwards to readRaw(): the armor finder works on the
// raw Bayer data directly, which is about twice as fast as readProcessed().
bool CameraWrapper::read(cv::Mat& src0, cv::Mat& src1) {
    return readRaw(src0, src1); //suit for using bayer hacking in armor_finder to replace process, fast and it can filter red and blue.
    //return readProcessed(src0, src1); // processed color image, but this runs slowly, about half fps of previous one.
}
/**
 * @brief Grab one raw single-channel (Bayer/mono) frame from each camera.
 *
 * Wraps the SDK buffers in IplImage headers without copying pixels.
 * @return true when both grabs succeed within the 1000 ms timeout.
 */
bool CameraWrapper::readRaw(cv::Mat &src0, cv::Mat &src1) {
    if (CameraGetImageBuffer(h_camera0, &frame_info0, &pby_buffer0, 1000) == CAMERA_STATUS_SUCCESS &&
        CameraGetImageBuffer(h_camera1, &frame_info1, &pby_buffer1, 1000) == CAMERA_STATUS_SUCCESS)
    {
        // NOTE(review): if the first grab succeeds but the second times out,
        // buffer 0 is never released -- confirm against the SDK docs.
        if (iplImage0) {
            cvReleaseImageHeader(&iplImage0);
        }
        if (iplImage1){
            cvReleaseImageHeader(&iplImage1);
        }
        iplImage0 = cvCreateImageHeader(cvSize(frame_info0.iWidth, frame_info0.iHeight), IPL_DEPTH_8U, 1);
        iplImage1 = cvCreateImageHeader(cvSize(frame_info1.iWidth, frame_info1.iHeight), IPL_DEPTH_8U, 1);
        cvSetData(iplImage0, pby_buffer0, frame_info0.iWidth); // only sets the data pointer; no pixel copy, so no conversion cost
        cvSetData(iplImage1, pby_buffer1, frame_info1.iWidth);
        src0 = cv::cvarrToMat(iplImage0);
        src1 = cv::cvarrToMat(iplImage1);
        // After a successful CameraGetImageBuffer, CameraReleaseImageBuffer
        // must be called to release the buffer; otherwise the next call to
        // CameraGetImageBuffer blocks until another thread releases it.
        // NOTE(review): src0/src1 still reference the released SDK buffers --
        // confirm the data remains valid until the next grab.
        CameraReleaseImageBuffer(h_camera0, pby_buffer0);
        CameraReleaseImageBuffer(h_camera1, pby_buffer1);
        return true;
    } else {
        return false;
    }
}
/**
 * @brief Grab one ISP-processed frame (mono or BGR) from each camera.
 *
 * Runs the SDK's software image pipeline into rgb_buffer0/1, then wraps the
 * results in IplImage headers without copying pixels.
 * @return true when both grabs succeed within the 1000 ms timeout.
 */
bool CameraWrapper::readProcessed(cv::Mat &src0, cv::Mat &src1) {
    if (CameraGetImageBuffer(h_camera0, &frame_info0, &pby_buffer0, 1000) == CAMERA_STATUS_SUCCESS &&
        CameraGetImageBuffer(h_camera1, &frame_info1, &pby_buffer1, 1000) == CAMERA_STATUS_SUCCESS)
    {
        CameraImageProcess(h_camera0, pby_buffer0, rgb_buffer0, &frame_info0); // this function is super slow, better not to use it.
        CameraImageProcess(h_camera1, pby_buffer1, rgb_buffer1, &frame_info1);
        if (iplImage0) {
            cvReleaseImageHeader(&iplImage0);
        }
        if (iplImage1){
            cvReleaseImageHeader(&iplImage1);
        }
        iplImage0 = cvCreateImageHeader(cvSize(frame_info0.iWidth, frame_info0.iHeight), IPL_DEPTH_8U, channel0);
        iplImage1 = cvCreateImageHeader(cvSize(frame_info1.iWidth, frame_info1.iHeight), IPL_DEPTH_8U, channel1);
        cvSetData(iplImage0, rgb_buffer0, frame_info0.iWidth * channel0); // only sets the data pointer; no pixel copy, so no conversion cost
        cvSetData(iplImage1, rgb_buffer1, frame_info1.iWidth * channel1);
        src0 = cv::cvarrToMat(iplImage0);
        src1 = cv::cvarrToMat(iplImage1);
        // After a successful CameraGetImageBuffer, CameraReleaseImageBuffer
        // must be called to release the buffer; otherwise the next call to
        // CameraGetImageBuffer blocks until another thread releases it.
        CameraReleaseImageBuffer(h_camera0, pby_buffer0);
        CameraReleaseImageBuffer(h_camera1, pby_buffer1);
        return true;
    } else {
        return false;
    }
}
// Release the camera handles, then the ISP buffers.
CameraWrapper::~CameraWrapper()
{
    CameraUnInit(h_camera0);
    CameraUnInit(h_camera1);
    // Note: uninitialize the cameras before freeing the buffers.
    free(rgb_buffer0);
    free(rgb_buffer1);
}
// Exchange the two SDK camera handles so that handle 0 refers to the
// camera expected on the left.
void CameraWrapper::swapCameraHandle() {
    const int original_first = h_camera0;
    h_camera0 = h_camera1;
    h_camera1 = original_first;
}

View File

@@ -0,0 +1,23 @@
//
// Created by xixiliadorabarry on 1/24/19.
//
#include "camera/video_wrapper.h"
// Open both video files; success is reported later by init().
VideoWrapper::VideoWrapper(const std::string &filename0, const std::string &filename1) {
    video0.open(filename0);
    video1.open(filename1);
}
VideoWrapper::~VideoWrapper() = default;
// The source is usable only when both capture streams opened successfully.
bool VideoWrapper::init() {
    if (!video0.isOpened()) {
        return false;
    }
    return video1.isOpened();
}
// Fetch the next frame from each stream. As in the original short-circuit
// form, the right stream is not read when the left one fails.
bool VideoWrapper::read(cv::Mat &src_left, cv::Mat &src_right) {
    if (!video0.read(src_left)) {
        return false;
    }
    return video1.read(src_right);
}

67
src/options/additions.cpp Normal file
View File

@@ -0,0 +1,67 @@
//
// Created by xinyang on 19-4-7.
//
#include <options/additions.h>
#include <options/options.h>
#include <stdio.h>
#include <log.h>
#include <opencv2/imgproc.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>
using namespace std;
#define VIDEO_SAVE_DIR "/home/sjturm/Desktop/video/"
// Allocate a VideoWriter for the next sequentially-numbered recording.
// The running file counter is persisted in VIDEO_SAVE_DIR"info.txt".
// Fixes vs. original: fopen() results are NULL-checked (the original
// crashed when info.txt was missing), the write-mode handle is closed
// (it was leaked and the counter could stay unflushed), and the file
// name is built with snprintf instead of sprintf.
static cv::VideoWriter *create_video_writer(){
    int cnt = 0;
    FILE *info = fopen(VIDEO_SAVE_DIR"info.txt", "r");
    if (info != NULL) {
        if (fscanf(info, "%d", &cnt) != 1) {
            cnt = 0; // file exists but holds no valid integer
        }
        fclose(info);
    }
    ++cnt;
    info = fopen(VIDEO_SAVE_DIR"info.txt", "w");
    if (info != NULL) {
        fprintf(info, "%d", cnt);
        fclose(info);
    }
    char name[100];
    snprintf(name, sizeof(name), VIDEO_SAVE_DIR"%d.avi", cnt);
    // 80 fps, 640x480, isColor=false (grayscale), MPEG-1 ('PIM1') codec.
    return new cv::VideoWriter(name, cv::VideoWriter::fourcc('P','I','M','1'), 80, cv::Size(640,480), false);
}
// Append one frame to the session's video file. The writer is created
// lazily on the first call and intentionally lives for the whole program
// (function-local static), so the container stays open across calls.
void save_video_file(cv::Mat &src){
    static cv::VideoWriter *writer = create_video_writer();
    writer->write(src);
}
#define SAVE_DIR "/home/sjturm/Desktop/labelled/"
// Read the last saved label index from SAVE_DIR"info.txt" and return the
// next free index. Fixes vs. original: fopen() is NULL-checked — on a
// first run (missing/unreadable file) this now returns 1 instead of
// crashing on fscanf(NULL, ...)/fclose(NULL).
int get_labelled_cnt(){
    int cnt = 0;
    FILE *fp = fopen(SAVE_DIR"info.txt", "r");
    if (fp != NULL) {
        if (fscanf(fp, "%d", &cnt) != 1) {
            cnt = 0; // file exists but holds no valid integer
        }
        fclose(fp);
    }
    return cnt + 1;
}
// Persist the label index to SAVE_DIR"info.txt" so numbering continues
// across runs. Fixes vs. original: fopen() is NULL-checked — if the
// directory does not exist this now skips saving instead of crashing.
void save_labelled_cnt(int cnt){
    FILE *fp = fopen(SAVE_DIR"info.txt", "w");
    if (fp == NULL) {
        return; // cannot persist; numbering restarts next run
    }
    fprintf(fp, "%d", cnt);
    fclose(fp);
}
// Save one labelled sample: the frame as SAVE_DIR"<n>.jpg" plus the box
// (x y w h) as SAVE_DIR"<n>.txt", then persist the counter.
// BUG FIX: the original never incremented the static counter, so every
// call overwrote the same <n>.jpg/<n>.txt pair; the index now advances
// after each successful save. Also sprintf -> snprintf.
void save_labelled_image(cv::Mat &src, cv::Rect2d box){
    static int cnt = get_labelled_cnt();
    char name[64];
    snprintf(name, sizeof(name), SAVE_DIR"%d.jpg", cnt);
    cv::imwrite(name, src);
    snprintf(name, sizeof(name), SAVE_DIR"%d.txt", cnt);
    FILE *fp = fopen(name, "w");
    if(fp == NULL){
        LOGW("Can't create file: %s!\nStop saving!", name);
        save_labelled = false;
        return;
    }
    fprintf(fp, "%lf %lf %lf %lf\n", box.x, box.y, box.width, box.height);
    fclose(fp);
    save_labelled_cnt(cnt);
    ++cnt; // advance so the next sample gets a fresh file pair
}

68
src/options/options.cpp Normal file
View File

@@ -0,0 +1,68 @@
//
// Created by xinyang on 19-3-27.
//
#include <options/options.h>
#include <log.h>
#include <cstring>
// Global runtime option flags, set once at startup by process_options().
bool show_armor_box = false;    // draw the final aim box on screen
bool show_armor_boxes = false;  // draw all candidate armor boxes
bool show_light_blobs = false;  // draw all candidate light blobs
bool show_origin = false;       // display the raw camera frame
bool save_labelled = false;     // save image + box label files to disk
bool run_with_camera = false;   // start with the camera without prompting
bool save_video = false;        // record frames to a video file
bool collect_data = false;      // collect data sent from the MCU
// Parse command-line switches and set the corresponding global flags.
// "--help" prints the recognized options; unknown switches log a warning.
// Fixes vs. original: help advertised "--save-label" while the handler
// matched "--save-labeled" (the help text is corrected to the string the
// code actually accepts), and "--show-all" was handled but undocumented.
void process_options(int argc, char *argv[]){
    if(argc >= 2){
        for(int i=1; i<argc; i++){
            if(strcmp(argv[i], "--help") == 0){
                LOGM("--show-armor-box: show the aim box.");
                LOGM("--show-armor-boxes: show the candidate aim boxes.");
                LOGM("--show-light-blobs: show the candidate light blobs.");
                LOGM("--show-origin: show the origin image.");
                LOGM("--show-all: enable all of the display options above.");
                LOGM("--save-labeled: save the image when box found.");
                LOGM("--run-with-camera: start the program with camera directly without asking.");
                LOGM("--save-video: save the video.");
                LOGM("--collect-data: collect data sent from mcu.");
            }else if(strcmp(argv[i], "--show-armor-box") == 0){
                show_armor_box = true;
                LOGM("Enable show armor box");
            }else if(strcmp(argv[i], "--show-armor-boxes") == 0){
                show_armor_boxes = true;
                LOGM("Enable show armor boxes");
            }else if(strcmp(argv[i], "--show-light-blobs") == 0) {
                show_light_blobs = true;
                LOGM("Enable show light blobs");
            }else if(strcmp(argv[i], "--show-origin") == 0) {
                show_origin = true;
                LOGM("Enable show origin");
            }else if(strcmp(argv[i], "--show-all") ==0 ) {
                show_armor_box = true;
                LOGM("Enable show armor box");
                show_armor_boxes = true;
                LOGM("Enable show armor boxes");
                show_light_blobs = true;
                LOGM("Enable show light blobs");
                show_origin = true;
                LOGM("Enable show origin");
            }else if(strcmp(argv[i], "--save-labeled") == 0){
                save_labelled = true;
                LOGM("Enable save labeled");
            }else if(strcmp(argv[i], "--run-with-camera") == 0){
                run_with_camera = true;
                LOGM("Run with camera!");
            }else if(strcmp(argv[i], "--save-video") == 0){
                save_video = true;
                LOGM("Save video!");
            }else if(strcmp(argv[i], "--collect-data") == 0){
                collect_data = true;
                LOGM("Enable data collection!");
            }else{
                LOGW("Unknown option: %s. Use --help to see options.", argv[i]);
            }
        }
    }
}

171
src/uart/uart.cpp Normal file
View File

@@ -0,0 +1,171 @@
//
// Created by xixiliadorabarry on 1/24/19.
//
#include <uart/uart.h>
#include <energy/param_struct_define.h>
using std::cout;
using std::cerr;
using std::clog;
using std::dec;
using std::endl;
using std::hex;
// Latest gimbal angles decoded from the MCU (written by Uart::receive_data).
GMAngle_t aim;
// Open the serial port at 115200 8N1 and pre-fill the 8-byte TX frame.
// On failure the constructor only logs and returns early; fd may then be
// negative (open failed) or open-but-unconfigured (set_opt failed) —
// NOTE(review): callers have no way to query this state; confirm intent.
Uart::Uart(){
    fd = open("/dev/ttyUSB0", O_RDWR);  // device path is hard-coded
    if(fd < 0)
    {
        cerr<<"open port error"<<endl;
        return;
    }
    if(set_opt(fd, 115200, 8, 'N', 1) < 0 )
    {
        cerr<<"set opt error"<<endl;
        return;
    }
    cout<<"uart port success"<<endl;
    // Placeholder frame: start 's', zeroed 16-bit payloads, end 'e'.
    // The '+' separators are only initial filler — sendTarget() overwrites
    // every byte of buff before each write.
    buff[0] = 's';
    buff[1] = '+';
    buff[2] = (0 >> 8) & 0xFF;
    buff[3] = 0 & 0xFF;
    buff[4] = '+';
    buff[5] = (0 >> 8) & 0xFF;
    buff[6] = (0 & 0xFF);
    buff[7] = 'e';
    fps = 0;                  // per-second frame counter, see sendTarget()
    cur_time = time(nullptr); // second boundary for the FPS printout
}
// Configure the serial line: baud rate nSpeed, data bits nBits (7/8),
// parity nEvent ('O' odd / 'E' even / 'N' none) and nStop stop bits (1/2).
// bzero() starts from an all-zero termios, which also leaves c_lflag at 0
// (raw, non-canonical mode). Returns 0 on success, -1 on failure.
int Uart::set_opt(int fd, int nSpeed, int nBits, char nEvent, int nStop) {
    termios newtio{}, oldtio{};
    // tcgetattr is used only to verify fd is a valid terminal; oldtio is
    // not restored anywhere.
    if (tcgetattr(fd, &oldtio) != 0) {
        perror("SetupSerial 1");
        return -1;
    }
    bzero(&newtio, sizeof(newtio));
    newtio.c_cflag |= CLOCAL | CREAD;   // ignore modem lines, enable RX
    newtio.c_cflag &= ~CSIZE;
    switch (nBits) {
        case 7:
            newtio.c_cflag |= CS7;break;
        case 8:
            newtio.c_cflag |= CS8;break;
        default:break;
    }
    switch (nEvent) {
        case 'O': // odd parity
            newtio.c_cflag |= PARENB;
            newtio.c_cflag |= PARODD;
            newtio.c_iflag |= (INPCK | ISTRIP);
            break;
        case 'E': // even parity
            newtio.c_iflag |= (INPCK | ISTRIP);
            newtio.c_cflag |= PARENB;
            newtio.c_cflag &= ~PARODD;
            break;
        case 'N': // no parity
            newtio.c_cflag &= ~PARENB;
            break;
        default:break;
    }
    switch (nSpeed) {
        case 2400:
            cfsetispeed(&newtio, B2400);
            cfsetospeed(&newtio, B2400);
            break;
        case 4800:
            cfsetispeed(&newtio, B4800);
            cfsetospeed(&newtio, B4800);
            break;
        case 9600:
            cfsetispeed(&newtio, B9600);
            cfsetospeed(&newtio, B9600);
            break;
        case 115200:
            cfsetispeed(&newtio, B115200);
            cfsetospeed(&newtio, B115200);
            break;
        default: // unknown rates fall back to 9600 baud
            cfsetispeed(&newtio, B9600);
            cfsetospeed(&newtio, B9600);
            break;
    }
    if (nStop == 1) {
        newtio.c_cflag &= ~CSTOPB;
    } else if (nStop == 2) {
        newtio.c_cflag |= CSTOPB;
    }
    // VMIN=0/VTIME=0: read() returns immediately with whatever is buffered
    // (possibly 0 bytes) — receive() spins on this.
    newtio.c_cc[VTIME] = 0;
    newtio.c_cc[VMIN] = 0;
    tcflush(fd, TCIFLUSH);  // drop any stale input before switching modes
    if ((tcsetattr(fd, TCSANOW, &newtio)) != 0) {
        perror("com set error");
        return -1;
    }
    printf("set done!\n");
    return 0;
}
void Uart::sendTarget(float x, float y, float z) {
static short x_tmp, y_tmp, z_tmp;
time_t t = time(nullptr);
if(cur_time != t)
{
cur_time = t;
cout<<"fps:"<<fps<<", ("<<x<<","<<y<<","<<z<<")"<<endl;
fps = 0;
}
fps += 1;
x_tmp= static_cast<short>(x * (32768 - 1) / 100);
y_tmp= static_cast<short>(y * (32768 - 1) / 100);
z_tmp= static_cast<short>(z * (32768 - 1) / 1000);
buff[0] = 's';
buff[1] = static_cast<char>((x_tmp >> 8) & 0xFF);
buff[2] = static_cast<char>((x_tmp >> 0) & 0xFF);
buff[3] = static_cast<char>((y_tmp >> 8) & 0xFF);
buff[4] = static_cast<char>((y_tmp >> 0) & 0xFF);
buff[5] = static_cast<char>((z_tmp >> 8) & 0xFF);
buff[6] = static_cast<char>((z_tmp >> 0) & 0xFF);
buff[7] = 'e';
write(fd, buff, 8);
}
// Frame layout: 's' + (x)(8bit+8bit) + (y)(8bit+8bit) + (z)(8bit+8bit) + 'e'
// Blocking single-byte read: since VMIN/VTIME are 0, read() returns
// immediately, so this spins until the port yields one byte.
char Uart::receive() {
    char byte;
    do {
    } while (read(fd, &byte, 1) < 1);
    return byte;
}
// Read one 6-byte MCU packet 's' yaw_hi yaw_lo pitch_hi pitch_lo 'e' and
// decode it into the global `aim` (inverse of the sendTarget scaling).
// Fixes vs. original:
//  * the read() count was ignored — with VMIN=0 a partial packet could be
//    parsed; now only a complete 6-byte frame is accepted;
//  * the payload bytes were combined as plain (possibly signed) char, so
//    a low byte >= 0x80 sign-extended and corrupted the OR (e.g. bytes
//    0x01,0xFF decoded as -1 instead of 511); bytes now go through
//    unsigned char and the pair is reinterpreted as a signed 16-bit value.
void Uart::receive_data() {
    char Enemy_Info[6] = {0};
    if (read(fd, Enemy_Info, sizeof(Enemy_Info)) != static_cast<ssize_t>(sizeof(Enemy_Info))) {
        return; // incomplete packet — wait for the next call
    }
    if (Enemy_Info[0] != 's' || Enemy_Info[5] != 'e') {
        return; // bad framing
    }
    const short yaw_raw = static_cast<short>(
            (static_cast<unsigned char>(Enemy_Info[1]) << 8) |
             static_cast<unsigned char>(Enemy_Info[2]));
    const short pitch_raw = static_cast<short>(
            (static_cast<unsigned char>(Enemy_Info[3]) << 8) |
             static_cast<unsigned char>(Enemy_Info[4]));
    aim.yaw = static_cast<float>(yaw_raw * (100.0 / (32768.0 - 1.0)));
    aim.pitch = static_cast<float>(pitch_raw * (100.0 / (32768.0 - 1.0)));
}