大版本更新。

This commit is contained in:
xinyang
2019-07-06 20:20:48 +08:00
parent 9f9050e04a
commit e9a0e9ad7b
23 changed files with 15297 additions and 20022 deletions

1
.gitignore vendored
View File

@@ -2,5 +2,6 @@ cmake-build-debug
build build
.idea .idea
Mark Mark
armor_box_photo
tools/TrainCNN/.idea tools/TrainCNN/.idea
tools/TrainCNN/__pycache__ tools/TrainCNN/__pycache__

View File

@@ -26,6 +26,7 @@ private:
const uint8_t &enemy_color; const uint8_t &enemy_color;
State state; State state;
cv::Rect2d armor_box; cv::Rect2d armor_box;
int boxid;
cv::Ptr<cv::Tracker> tracker; cv::Ptr<cv::Tracker> tracker;
cv::Mat src_gray; cv::Mat src_gray;

View File

@@ -9,11 +9,13 @@
#include <opencv2/highgui.hpp> #include <opencv2/highgui.hpp>
#include <opencv2/imgproc.hpp> #include <opencv2/imgproc.hpp>
#include <armor_finder/armor_finder.h> #include <armor_finder/armor_finder.h>
#include <map>
extern std::map<int, string> id2name;
void showArmorBoxVector(std::string windows_name, const cv::Mat &src, const std::vector<cv::Rect2d> &armor_box); void showArmorBoxVector(std::string windows_name, const cv::Mat &src, const std::vector<cv::Rect2d> &armor_box);
void showArmorBox(std::string windows_name, const cv::Mat &src, cv::Rect2d armor_box); void showArmorBox(std::string windows_name, const cv::Mat &src, cv::Rect2d armor_box, int boxid);
void showContours(std::string windows_name, const cv::Mat &src, const std::vector<LightBlob> &light_blobs); void showContours(std::string windows_name, const cv::Mat &src, const std::vector<LightBlob> &light_blobs);
void showArmorBoxClass(std::string window_names, const cv::Mat &src, vector<cv::Rect2d> boxes[10]); void showArmorBoxClass(std::string window_names, const cv::Mat &src, vector<cv::Rect2d> boxes[10]);
void showCuoWeiDu(const cv::Mat &src, const std::vector<LightBlob> &light_blobs);
#endif /* _SHOW_IMAGES_H_ */ #endif /* _SHOW_IMAGES_H_ */

View File

@@ -14,7 +14,8 @@ ArmorFinder::ArmorFinder(uint8_t &color, Serial &u, string paras_folder, const u
state(STANDBY_STATE), state(STANDBY_STATE),
classifier(std::move(paras_folder)), classifier(std::move(paras_folder)),
contour_area(0), contour_area(0),
use_classifier(use) use_classifier(use),
boxid(-1)
{ {
} }
@@ -25,11 +26,11 @@ void ArmorFinder::run(cv::Mat &src) {
cv::cvtColor(src_use, src_gray, CV_RGB2GRAY); cv::cvtColor(src_use, src_gray, CV_RGB2GRAY);
if(show_armor_box){ if(show_armor_box){
showArmorBox("box", src, armor_box); showArmorBox("box", src, armor_box, boxid);
cv::waitKey(1); cv::waitKey(1);
} }
// stateSearchingTarget(src_use); stateSearchingTarget(src_use);
// return; return;
switch (state){ switch (state){
case SEARCHING_STATE: case SEARCHING_STATE:
if(stateSearchingTarget(src_use)){ if(stateSearchingTarget(src_use)){

View File

@@ -300,6 +300,7 @@ int Classifier::operator()(const cv::Mat &image) {
vector<MatrixXd> sub = {b, g, r}; vector<MatrixXd> sub = {b, g, r};
vector<vector<MatrixXd>> in = {sub}; vector<vector<MatrixXd>> in = {sub};
MatrixXd result = calculate(in); MatrixXd result = calculate(in);
// cout << result << "==============" <<endl;
MatrixXd::Index minRow, minCol; MatrixXd::Index minRow, minCol;
result.maxCoeff(&minRow, &minCol); result.maxCoeff(&minRow, &minCol);
return minRow; return minRow;

View File

@@ -168,13 +168,42 @@ bool lengthRatioJudge(const LightBlob &light_blob_i, const LightBlob &light_blob
&& light_blob_i.length / light_blob_j.length > 0.5); && light_blob_i.length / light_blob_j.length > 0.5);
} }
/* 判断两个灯条的错位度,不知道英文是什么!!! */
bool CuoWeiDuJudge(const LightBlob &light_blob_i, const LightBlob &light_blob_j){
float angle_i = light_blob_i.rect.size.width > light_blob_i.rect.size.height ? light_blob_i.rect.angle:
light_blob_i.rect.angle - 90;
float angle_j = light_blob_j.rect.size.width > light_blob_j.rect.size.height ? light_blob_j.rect.angle:
light_blob_j.rect.angle - 90;
float angle = (angle_i+angle_j)/2.0/180.0*3.14159265459;
if(abs(angle_i-angle_j)>90){
angle += 3.14159265459/2;
}
Vector2f orientation(cos(angle), sin(angle));
Vector2f p2p(light_blob_j.rect.center.x-light_blob_i.rect.center.x, light_blob_j.rect.center.y-light_blob_i.rect.center.y);
return abs(orientation.dot(p2p)) < 20;
}
bool boxAngleJudge(const LightBlob &light_blob_i, const LightBlob &light_blob_j){
float angle_i = light_blob_i.rect.size.width > light_blob_i.rect.size.height ? light_blob_i.rect.angle:
light_blob_i.rect.angle - 90;
float angle_j = light_blob_j.rect.size.width > light_blob_j.rect.size.height ? light_blob_j.rect.angle:
light_blob_j.rect.angle - 90;
float angle = (angle_i+angle_j)/2.0;
if(abs(angle_i-angle_j)>90){
angle += 90.0;
}
return (-120.0<angle && angle<-60.0) || (60.0<angle && angle<120.0);
}
bool isCoupleLight(const LightBlob &light_blob_i, const LightBlob &light_blob_j, uint8_t enemy_color) { bool isCoupleLight(const LightBlob &light_blob_i, const LightBlob &light_blob_j, uint8_t enemy_color) {
return light_blob_i.BlobColor == enemy_color && return light_blob_i.BlobColor == enemy_color &&
light_blob_j.BlobColor == enemy_color && light_blob_j.BlobColor == enemy_color &&
lengthRatioJudge(light_blob_i, light_blob_j) && lengthRatioJudge(light_blob_i, light_blob_j) &&
lengthJudge(light_blob_i, light_blob_j) && lengthJudge(light_blob_i, light_blob_j) &&
heightJudge(light_blob_i, light_blob_j) && // heightJudge(light_blob_i, light_blob_j) &&
angelJudge(light_blob_i, light_blob_j); angelJudge(light_blob_i, light_blob_j) &&
boxAngleJudge(light_blob_i, light_blob_j) &&
CuoWeiDuJudge(light_blob_i, light_blob_j);
} }
@@ -195,8 +224,8 @@ static bool findArmorBoxes(LightBlobs &light_blobs, std::vector<cv::Rect2d> &arm
double min_x, min_y, max_x, max_y; double min_x, min_y, max_x, max_y;
min_x = fmin(rect_left.x, rect_right.x) - 4; min_x = fmin(rect_left.x, rect_right.x) - 4;
max_x = fmax(rect_left.x + rect_left.width, rect_right.x + rect_right.width) + 4; max_x = fmax(rect_left.x + rect_left.width, rect_right.x + rect_right.width) + 4;
min_y = fmin(rect_left.y, rect_right.y) - 0.3*(rect_left.height+rect_right.height)/2.0; min_y = fmin(rect_left.y, rect_right.y) - 0.5*(rect_left.height+rect_right.height)/2.0;
max_y = fmax(rect_left.y + rect_left.height, rect_right.y + rect_right.height) + 0.3*(rect_left.height+rect_right.height)/2.0; max_y = fmax(rect_left.y + rect_left.height, rect_right.y + rect_right.height) + 0.5*(rect_left.height+rect_right.height)/2.0;
if (min_x < 0 || max_x > 640 || min_y < 0 || max_y > 480) { if (min_x < 0 || max_x > 640 || min_y < 0 || max_y > 480) {
continue; continue;
} }
@@ -251,13 +280,13 @@ void get_blob_color(const cv::Mat &src, std::vector<LightBlob> &blobs) {
} }
} }
int prior_red[] = {0, 2, 3, 4, 1, 5, 7, 8, 9, 6}; int prior_blue[] = {6, 0, 2, 3, 4, 5, 1, 13, 7, 9, 10, 11, 12, 8};
int prior_blue[]= {5, 7, 8, 9, 6, 0, 2, 3, 4, 1}; int prior_red[]= {13, 7, 9, 10, 11, 12, 8, 6, 0, 2, 3, 4, 5, 1};
bool ArmorFinder::stateSearchingTarget(cv::Mat &src) { bool ArmorFinder::stateSearchingTarget(cv::Mat &src) {
cv::Mat split, src_bin/*, edge*/; cv::Mat split, src_bin/*, edge*/;
LightBlobs light_blobs, light_blobs_, light_blobs_real; LightBlobs light_blobs, light_blobs_, light_blobs_real;
std::vector<cv::Rect2d> armor_boxes, boxes_number[10]; std::vector<cv::Rect2d> armor_boxes, boxes_number[14];
armor_box = cv::Rect2d(0,0,0,0); armor_box = cv::Rect2d(0,0,0,0);
cv::cvtColor(src, src_gray, CV_BGR2GRAY); cv::cvtColor(src, src_gray, CV_BGR2GRAY);
@@ -295,7 +324,8 @@ bool ArmorFinder::stateSearchingTarget(cv::Mat &src) {
light_blobs_real = light_blobs; light_blobs_real = light_blobs;
get_blob_color(src, light_blobs_real); get_blob_color(src, light_blobs_real);
if(show_light_blobs){ if(show_light_blobs){
showContours("blobs_real", src, light_blobs_real); showContours("light_blobs", src, light_blobs_real);
// showCuoWeiDu(src, light_blobs_real);
cv::waitKey(1); cv::waitKey(1);
} }
@@ -319,6 +349,7 @@ bool ArmorFinder::stateSearchingTarget(cv::Mat &src) {
for(auto id : prior_blue){ for(auto id : prior_blue){
if(!boxes_number[id].empty()){ if(!boxes_number[id].empty()){
armor_box = boxes_number[id][0]; armor_box = boxes_number[id][0];
boxid = id;
break; break;
} }
} }
@@ -326,6 +357,7 @@ bool ArmorFinder::stateSearchingTarget(cv::Mat &src) {
for(auto id : prior_red){ for(auto id : prior_red){
if(!boxes_number[id].empty()){ if(!boxes_number[id].empty()){
armor_box = boxes_number[id][0]; armor_box = boxes_number[id][0];
boxid = id;
break; break;
} }
} }
@@ -337,9 +369,17 @@ bool ArmorFinder::stateSearchingTarget(cv::Mat &src) {
} }
if(show_armor_boxes){ if(show_armor_boxes){
showArmorBoxClass("class", src, boxes_number); showArmorBoxClass("class", src, boxes_number);
for(int i=0; i<sizeof(boxes_number)/ sizeof(boxes_number[0]); i++){
for(auto &box : boxes_number[i]){
char filename[100];
sprintf(filename, PROJECT_DIR"/armor_box_photo/%s_%d.jpg", id2name[i].data(), time(nullptr)+clock());
cv::imwrite(filename, src(box));
}
}
} }
}else{ }else{
armor_box = armor_boxes[0]; armor_box = armor_boxes[0];
boxid = -1;
} }
if(split.size() == cv::Size(320, 240)){ if(split.size() == cv::Size(320, 240)){
armor_box.x *= 2; armor_box.x *= 2;

View File

@@ -1,7 +1,26 @@
#include <show_images/show_images.h> #include <show_images/show_images.h>
#include <log.h>
using namespace cv; using namespace cv;
std::map<int, string> id2name = {
{-1, "NO"},
{ 0, "B1"},
{ 1, "B2"},
{ 2, "B3"},
{ 3, "B4"},
{ 4, "B5"},
{ 5, "B7"},
{ 6, "B8"},
{ 7, "R1"},
{ 8, "R2"},
{ 9, "R3"},
{10, "R4"},
{11, "R5"},
{12, "R7"},
{13, "R8"},
};
void showArmorBoxVector(std::string windows_name, const cv::Mat &src, const std::vector<cv::Rect2d> &armor_box) { void showArmorBoxVector(std::string windows_name, const cv::Mat &src, const std::vector<cv::Rect2d> &armor_box) {
static Mat image2show; static Mat image2show;
if (src.type() == CV_8UC1) // 黑白图像 if (src.type() == CV_8UC1) // 黑白图像
@@ -27,20 +46,25 @@ void showArmorBoxClass(std::string window_names, const cv::Mat &src, vector<cv::
{ {
image2show = src.clone(); image2show = src.clone();
} }
for(int i=0; i<10; i++){ for(int i=0; i<14; i++){
if(!boxes[i].empty()){ if(!boxes[i].empty()){
for(auto box : boxes[i]){ for(auto box : boxes[i]){
char buff[2] = {0};
buff[0] = i + '0';
rectangle(image2show, box, Scalar(0, 255, 0), 1); rectangle(image2show, box, Scalar(0, 255, 0), 1);
putText(image2show, buff, Point(box.x+2, box.y+2), cv::FONT_HERSHEY_TRIPLEX, 1, Scalar(255,0,0)); if(i == -1)
putText(image2show, id2name[i], Point(box.x+2, box.y+2), cv::FONT_HERSHEY_TRIPLEX, 1, Scalar(0,255,0));
else if(0<=i && i<7)
putText(image2show, id2name[i], Point(box.x+2, box.y+2), cv::FONT_HERSHEY_TRIPLEX, 1, Scalar(255,0,0));
else if(7<=i && i<14)
putText(image2show, id2name[i], Point(box.x+2, box.y+2), cv::FONT_HERSHEY_TRIPLEX, 1, Scalar(0,0,255));
else
LOGE_INFO("Invalid box id:%d!", i);
} }
} }
} }
imshow(window_names, image2show); imshow(window_names, image2show);
} }
void showArmorBox(std::string windows_name, const cv::Mat &src, cv::Rect2d armor_box) { void showArmorBox(std::string windows_name, const cv::Mat &src, cv::Rect2d armor_box, int boxid) {
static Mat image2show; static Mat image2show;
if (src.type() == CV_8UC1) // 黑白图像 if (src.type() == CV_8UC1) // 黑白图像
{ {
@@ -50,6 +74,14 @@ void showArmorBox(std::string windows_name, const cv::Mat &src, cv::Rect2d armor
image2show = src.clone(); image2show = src.clone();
} }
rectangle(image2show, armor_box, Scalar(0, 255, 0), 1); rectangle(image2show, armor_box, Scalar(0, 255, 0), 1);
if(boxid == -1)
putText(image2show, id2name[boxid], Point(armor_box.x+2, armor_box.y+2), cv::FONT_HERSHEY_TRIPLEX, 1, Scalar(0,255,0));
else if(0<=boxid && boxid<7)
putText(image2show, id2name[boxid], Point(armor_box.x+2, armor_box.y+2), cv::FONT_HERSHEY_TRIPLEX, 1, Scalar(255,0,0));
else if(7<=boxid && boxid<14)
putText(image2show, id2name[boxid], Point(armor_box.x+2, armor_box.y+2), cv::FONT_HERSHEY_TRIPLEX, 1, Scalar(0,0,255));
else
LOGE_INFO("Invalid box id:%d!", boxid);
imshow(windows_name, image2show); imshow(windows_name, image2show);
} }
@@ -67,13 +99,49 @@ void showContours(std::string windows_name, const cv::Mat &src, const std::vecto
for(const auto &light_blob:light_blobs) for(const auto &light_blob:light_blobs)
{ {
Scalar color;
if(light_blob.BlobColor == BLOB_RED) if(light_blob.BlobColor == BLOB_RED)
rectangle(image2show, light_blob.rect.boundingRect(), Scalar(0,0,255), 3); color = Scalar(0,0,255);
if(light_blob.BlobColor == BLOB_BLUE) else if(light_blob.BlobColor == BLOB_BLUE)
rectangle(image2show, light_blob.rect.boundingRect(), Scalar(255,0,0), 3); color = Scalar(255,0,0);
else else
rectangle(image2show, light_blob.rect.boundingRect(), Scalar(0,255,0), 3); color = Scalar(0,255,0);
cv::Point2f vertices[4];
light_blob.rect.points(vertices);
for (int j = 0; j < 4; j++){
cv::line(image2show, vertices[j], vertices[(j + 1) % 4], color, 2);
}
} }
imshow(windows_name, image2show); imshow(windows_name, image2show);
} }
void drawCuoWeiDu(cv::Mat &src, const LightBlob &light_blob_i, const LightBlob &light_blob_j){
float angle_i = light_blob_i.rect.size.width > light_blob_i.rect.size.height ? light_blob_i.rect.angle:
light_blob_i.rect.angle - 90;
float angle_j = light_blob_j.rect.size.width > light_blob_j.rect.size.height ? light_blob_j.rect.angle:
light_blob_j.rect.angle - 90;
float angle = (angle_i+angle_j)/2.0/180.0*3.14159265459;
if(abs(angle_i-angle_j)>90){
angle += 3.14159265459/2;
}
Point2f orientation(cos(angle), sin(angle));
Vector2f p2p(light_blob_j.rect.center.x-light_blob_i.rect.center.x, light_blob_i.rect.center.y-light_blob_j.rect.center.y);
cv::line(
src,
(light_blob_j.rect.center+light_blob_i.rect.center)/2.0,
(light_blob_j.rect.center+light_blob_i.rect.center)/2.0 + 100*orientation,
Scalar(0,255,0),
2
);
}
void showCuoWeiDu(const cv::Mat &src, const std::vector<LightBlob> &light_blobs){
Mat image2show = src.clone();
for (int i = 0; i < light_blobs.size() - 1; ++i) {
for (int j = i + 1; j < light_blobs.size(); ++j) {
drawCuoWeiDu(image2show, light_blobs[i], light_blobs[j]);
}
}
imshow("CuoWeiDu", image2show);
}

View File

@@ -32,7 +32,7 @@ mcu_data mcuData = {
ARMOR_STATE, ARMOR_STATE,
0, 0,
1, 1,
ENEMY_RED, ENEMY_BLUE,
}; };
int main(int argc, char *argv[]) { int main(int argc, char *argv[]) {
@@ -57,14 +57,9 @@ int main(int argc, char *argv[]) {
WrapperHead *video_armor=nullptr; WrapperHead *video_armor=nullptr;
WrapperHead *video_energy=nullptr; WrapperHead *video_energy=nullptr;
if (from_camera) { if (from_camera) {
video_armor = new CameraWrapper(0, "armor"); video_armor = new CameraWrapper(0/*, "armor"*/);
video_energy = new CameraWrapper(1, "energy"); video_energy = new CameraWrapper(1, "energy");
} else { } else {
// string armor_video, energy_video;
// lastVideo(armor_video, PROJECT_DIR"/armor_video/");
// video_armor = new VideoWrapper(armor_video);
// lastVideo(energy_video, PROJECT_DIR"/energy_video/");
// video_energy = new VideoWrapper(energy_video);
video_armor = new VideoWrapper("/home/sjturm/Desktop/valid_video/armor/65.avi"); video_armor = new VideoWrapper("/home/sjturm/Desktop/valid_video/armor/65.avi");
video_energy = new VideoWrapper("/home/sjturm/Desktop/valid_video/energy/121.avi"); video_energy = new VideoWrapper("/home/sjturm/Desktop/valid_video/energy/121.avi");
} }

View File

@@ -76,9 +76,9 @@ bool CameraWrapper::init() {
#elif defined(Linux) #elif defined(Linux)
CameraSetAeState(h_camera, false); CameraSetAeState(h_camera, false);
CameraSetExposureTime(h_camera, 10*1000); CameraSetExposureTime(h_camera, 10*1000);
CameraSetAnalogGain(h_camera, 20); CameraSetAnalogGain(h_camera, 40);
if(mode == 0){ if(mode == 0){
CameraSetGain(h_camera, 100, 130, 112); CameraSetGain(h_camera, 100, 100, 100);
CameraSetLutMode(h_camera, LUTMODE_PRESET); CameraSetLutMode(h_camera, LUTMODE_PRESET);
} }
#endif #endif

152
tools/TrainCNN/backward.py Normal file → Executable file
View File

@@ -5,8 +5,8 @@ from tqdm import tqdm
import generate import generate
import forward import forward
import cv2 import cv2
import sys
import numpy as np import numpy as np
import mvsdk
print("Finish!") print("Finish!")
def save_kernal(fp, val): def save_kernal(fp, val):
@@ -54,7 +54,7 @@ def save_para(folder, paras):
save_bias(fp, paras[7]) save_bias(fp, paras[7])
STEPS = 100000 STEPS = 5000
BATCH = 30 BATCH = 30
LEARNING_RATE_BASE = 0.01 LEARNING_RATE_BASE = 0.01
LEARNING_RATE_DECAY = 0.99 LEARNING_RATE_DECAY = 0.99
@@ -62,12 +62,16 @@ MOVING_AVERAGE_DECAY = 0.99
def train(dataset, show_bar=False): def train(dataset, show_bar=False):
test_images, test_labels = dataset.all_test_sets()
x = tf.placeholder(tf.float32, [None, generate.SRC_ROWS, generate.SRC_COLS, generate.SRC_CHANNELS]) x = tf.placeholder(tf.float32, [None, generate.SRC_ROWS, generate.SRC_COLS, generate.SRC_CHANNELS])
y_= tf.placeholder(tf.float32, [None, forward.OUTPUT_NODES]) y_= tf.placeholder(tf.float32, [None, forward.OUTPUT_NODES])
nodes, vars = forward.forward(x, 0.001) keep_rate = tf.placeholder(tf.float32)
nodes, vars = forward.forward(x, 0.01)
y = nodes[-1] y = nodes[-1]
ce = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=y, labels=tf.argmax(y_, 1)) # ce = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=y, labels=tf.argmax(y_, 1))
ce = tf.nn.weighted_cross_entropy_with_logits(logits=y, labels=tf.argmax(y_, 1), pos_weight=1)
cem = tf.reduce_mean(ce) cem = tf.reduce_mean(ce)
loss= cem + tf.add_n(tf.get_collection("losses")) loss= cem + tf.add_n(tf.get_collection("losses"))
@@ -87,72 +91,118 @@ def train(dataset, show_bar=False):
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1)) correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32)) accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
config = tf.ConfigProto(gpu_options=tf.GPUOptions(allow_growth=True)) config = tf.ConfigProto(gpu_options=tf.GPUOptions(allow_growth=True))
with tf.Session(config=config) as sess: with tf.Session(config=config) as sess:
init_op = tf.global_variables_initializer() init_op = tf.global_variables_initializer()
sess.run(init_op) sess.run(init_op)
bar = tqdm(range(STEPS), dynamic_ncols=True) bar = tqdm(range(STEPS), ascii=True, dynamic_ncols=True)
for i in bar: for i in bar:
images_samples, labels_samples = dataset.sample_train_sets(BATCH) images_samples, labels_samples = dataset.sample_train_sets(BATCH)
_, loss_value, step = sess.run( _, loss_value, step = sess.run(
[train_op, loss, global_step], [train_op, loss, global_step],
feed_dict={x: images_samples, y_: labels_samples} feed_dict={x: images_samples, y_: labels_samples, keep_rate:0.7}
) )
if i % 100 == 0: if i % 100 == 0:
if i % 1000 == 0: if i % 1000 == 0:
test_samples, test_labels = dataset.sample_test_sets(1000) acc = sess.run(accuracy, feed_dict={x: test_images, y_: test_labels, keep_rate:1.0})
acc = sess.run(accuracy, feed_dict={x: test_samples, y_: test_labels})
bar.set_postfix({"loss": loss_value, "acc": acc}) bar.set_postfix({"loss": loss_value, "acc": acc})
# video = cv2.VideoCapture("/home/xinyang/Desktop/Video.mp4")
# _ = True
# while _:
# _, frame = video.read()
# cv2.imshow("Video", frame)
# k = cv2.waitKey(10)
# if k == ord(" "):
# bbox = cv2.selectROI("frame", frame, False)
# print(bbox)
# roi = frame[bbox[1]:bbox[1]+bbox[3], bbox[0]:bbox[0]+bbox[2]]
# roi = cv2.resize(roi, (48, 36))
# cv2.imshow("roi", roi)
# cv2.waitKey(0)
# roi = roi.astype(np.float32)
# roi /= 255.0
# roi = roi.reshape([1, 36, 48, 3])
# res = sess.run(y, feed_dict={x: roi})
# res = res.reshape([forward.OUTPUT_NODES])
# print(np.argmax(res))
# elif k==ord("q"):
# break
# keep = True
# while keep:
# n = input()
# im = cv2.imread(n)
# im = cv2.resize(im, (48, 36))
# cv2.imshow("im", im)
# if cv2.waitKey(0) == ord("q"):
# keep = False
# im = im.astype(np.float32)
# im /= 255.0
# im = im.reshape([1, 36, 48, 3])
# res = sess.run(y, feed_dict={x: im})
# res = res.reshape([forward.OUTPUT_NODES])
# print(np.argmax(res))
test_samples, test_labels = dataset.sample_test_sets(100)
vars_val = sess.run(vars) vars_val = sess.run(vars)
save_para("/home/xinyang/Desktop/AutoAim/tools/para", vars_val) save_para("/home/xinyang/Desktop/RM_auto-aim/tools/para", vars_val)
nodes_val = sess.run(nodes, feed_dict={x:test_samples}) print("save done!")
return vars_val, nodes_val, test_samples # nodes_val = sess.run(nodes, feed_dict={x:test_images})
# return vars_val, nodes_val
DevList = mvsdk.CameraEnumerateDevice()
nDev = len(DevList)
if nDev < 1:
print("No camera was found!")
return
for i, DevInfo in enumerate(DevList):
print("{}: {} {}".format(i, DevInfo.GetFriendlyName(), DevInfo.GetPortType()))
i = 0 if nDev == 1 else int(input("Select camera: "))
DevInfo = DevList[i]
print(DevInfo)
# 打开相机
hCamera = 0
try:
hCamera = mvsdk.CameraInit(DevInfo, -1, -1)
except mvsdk.CameraException as e:
print("CameraInit Failed({}): {}".format(e.error_code, e.message) )
return
# 获取相机特性描述
cap = mvsdk.CameraGetCapability(hCamera)
# 判断是黑白相机还是彩色相机
monoCamera = (cap.sIspCapacity.bMonoSensor != 0)
# 黑白相机让ISP直接输出MONO数据而不是扩展成R=G=B的24位灰度
if monoCamera:
mvsdk.CameraSetIspOutFormat(hCamera, mvsdk.CAMERA_MEDIA_TYPE_MONO8)
else:
mvsdk.CameraSetIspOutFormat(hCamera, mvsdk.CAMERA_MEDIA_TYPE_BGR8)
# 相机模式切换成连续采集
mvsdk.CameraSetTriggerMode(hCamera, 0)
# 手动曝光曝光时间30ms
mvsdk.CameraSetAeState(hCamera, 0)
mvsdk.CameraSetExposureTime(hCamera, 30 * 1000)
# 让SDK内部取图线程开始工作
mvsdk.CameraPlay(hCamera)
# 计算RGB buffer所需的大小这里直接按照相机的最大分辨率来分配
FrameBufferSize = cap.sResolutionRange.iWidthMax * cap.sResolutionRange.iHeightMax * (1 if monoCamera else 3)
# 分配RGB buffer用来存放ISP输出的图像
# 备注从相机传输到PC端的是RAW数据在PC端通过软件ISP转为RGB数据如果是黑白相机就不需要转换格式但是ISP还有其它处理所以也需要分配这个buffer
pFrameBuffer = mvsdk.CameraAlignMalloc(FrameBufferSize, 16)
while (cv2.waitKey(1) & 0xFF) != ord('q'):
# 从相机取一帧图片
try:
pRawData, FrameHead = mvsdk.CameraGetImageBuffer(hCamera, 200)
mvsdk.CameraImageProcess(hCamera, pRawData, pFrameBuffer, FrameHead)
mvsdk.CameraReleaseImageBuffer(hCamera, pRawData)
# 此时图片已经存储在pFrameBuffer中对于彩色相机pFrameBuffer=RGB数据黑白相机pFrameBuffer=8位灰度数据
# 把pFrameBuffer转换成opencv的图像格式以进行后续算法处理
frame_data = (mvsdk.c_ubyte * FrameHead.uBytes).from_address(pFrameBuffer)
frame = np.frombuffer(frame_data, dtype=np.uint8)
frame = frame.reshape((FrameHead.iHeight, FrameHead.iWidth, 1 if FrameHead.uiMediaType == mvsdk.CAMERA_MEDIA_TYPE_MONO8 else 3) )
frame = cv2.resize(frame, (640,480), interpolation = cv2.INTER_LINEAR)
cv2.imshow("Press q to end", frame)
if (cv2.waitKey(1)&0xFF) == ord(' '):
roi = cv2.selectROI("roi", frame)
roi = frame[roi[1]:roi[1]+roi[3], roi[0]:roi[0]+roi[2]]
print(roi)
cv2.imshow("box", roi)
image = cv2.resize(roi, (48, 36))
image = image.astype(np.float32) / 255.0
out = sess.run(y, feed_dict={x:[image]})
print(out)
print(np.argmax(out))
except mvsdk.CameraException as e:
if e.error_code != mvsdk.CAMERA_STATUS_TIME_OUT:
print("CameraGetImageBuffer failed({}): {}".format(e.error_code, e.message) )
# 关闭相机
mvsdk.CameraUnInit(hCamera)
# 释放帧缓存
mvsdk.CameraAlignFree(pFrameBuffer)
if __name__ == "__main__": if __name__ == "__main__":
print("Loading data sets...") dataset = generate.DataSet("/home/xinyang/Desktop/box_cut")
dataset = generate.DataSet("/home/xinyang/Desktop/dataset/box")
print("Finish!")
train(dataset, show_bar=True) train(dataset, show_bar=True)
input("Press any key to end...")

94
tools/TrainCNN/cv_grab.py Normal file
View File

@@ -0,0 +1,94 @@
#coding=utf-8
import cv2
import numpy as np
import mvsdk
def main_loop():
# 枚举相机
DevList = mvsdk.CameraEnumerateDevice()
nDev = len(DevList)
if nDev < 1:
print("No camera was found!")
return
for i, DevInfo in enumerate(DevList):
print("{}: {} {}".format(i, DevInfo.GetFriendlyName(), DevInfo.GetPortType()))
i = 0 if nDev == 1 else int(input("Select camera: "))
DevInfo = DevList[i]
print(DevInfo)
# 打开相机
hCamera = 0
try:
hCamera = mvsdk.CameraInit(DevInfo, -1, -1)
except mvsdk.CameraException as e:
print("CameraInit Failed({}): {}".format(e.error_code, e.message) )
return
# 获取相机特性描述
cap = mvsdk.CameraGetCapability(hCamera)
# 判断是黑白相机还是彩色相机
monoCamera = (cap.sIspCapacity.bMonoSensor != 0)
# 黑白相机让ISP直接输出MONO数据而不是扩展成R=G=B的24位灰度
if monoCamera:
mvsdk.CameraSetIspOutFormat(hCamera, mvsdk.CAMERA_MEDIA_TYPE_MONO8)
else:
mvsdk.CameraSetIspOutFormat(hCamera, mvsdk.CAMERA_MEDIA_TYPE_BGR8)
# 相机模式切换成连续采集
mvsdk.CameraSetTriggerMode(hCamera, 0)
# 手动曝光曝光时间30ms
mvsdk.CameraSetAeState(hCamera, 0)
mvsdk.CameraSetExposureTime(hCamera, 30 * 1000)
# 让SDK内部取图线程开始工作
mvsdk.CameraPlay(hCamera)
# 计算RGB buffer所需的大小这里直接按照相机的最大分辨率来分配
FrameBufferSize = cap.sResolutionRange.iWidthMax * cap.sResolutionRange.iHeightMax * (1 if monoCamera else 3)
# 分配RGB buffer用来存放ISP输出的图像
# 备注从相机传输到PC端的是RAW数据在PC端通过软件ISP转为RGB数据如果是黑白相机就不需要转换格式但是ISP还有其它处理所以也需要分配这个buffer
pFrameBuffer = mvsdk.CameraAlignMalloc(FrameBufferSize, 16)
while (cv2.waitKey(1) & 0xFF) != ord('q'):
# 从相机取一帧图片
try:
pRawData, FrameHead = mvsdk.CameraGetImageBuffer(hCamera, 200)
mvsdk.CameraImageProcess(hCamera, pRawData, pFrameBuffer, FrameHead)
mvsdk.CameraReleaseImageBuffer(hCamera, pRawData)
# 此时图片已经存储在pFrameBuffer中对于彩色相机pFrameBuffer=RGB数据黑白相机pFrameBuffer=8位灰度数据
# 把pFrameBuffer转换成opencv的图像格式以进行后续算法处理
frame_data = (mvsdk.c_ubyte * FrameHead.uBytes).from_address(pFrameBuffer)
frame = np.frombuffer(frame_data, dtype=np.uint8)
frame = frame.reshape((FrameHead.iHeight, FrameHead.iWidth, 1 if FrameHead.uiMediaType == mvsdk.CAMERA_MEDIA_TYPE_MONO8 else 3) )
frame = cv2.resize(frame, (640,480), interpolation = cv2.INTER_LINEAR)
cv2.imshow("Press q to end", frame)
roi = cv2.selectROI("roi", frame)
roi = frame[roi[1]:roi[1]+roi[3], roi[0]:roi[0]+roi[2]]
print(roi)
cv2.imshow("box", roi)
except mvsdk.CameraException as e:
if e.error_code != mvsdk.CAMERA_STATUS_TIME_OUT:
print("CameraGetImageBuffer failed({}): {}".format(e.error_code, e.message) )
# 关闭相机
mvsdk.CameraUnInit(hCamera)
# 释放帧缓存
mvsdk.CameraAlignFree(pFrameBuffer)
def main():
try:
main_loop()
finally:
cv2.destroyAllWindows()
main()

View File

@@ -29,24 +29,25 @@ def max_pool_2x2(x):
CONV1_KERNAL_SIZE = 5 CONV1_KERNAL_SIZE = 5
# 第一层卷积输出通道数 # 第一层卷积输出通道数
CONV1_OUTPUT_CHANNELS = 8 CONV1_OUTPUT_CHANNELS = 6
# 第二层卷积核大小 # 第二层卷积核大小
CONV2_KERNAL_SIZE = 3 CONV2_KERNAL_SIZE = 3
# 第二层卷积输出通道数 # 第二层卷积输出通道数
CONV2_OUTPUT_CHANNELS = 16 CONV2_OUTPUT_CHANNELS = 10
# 第一层全连接宽度 # 第一层全连接宽度
FC1_OUTPUT_NODES = 16 FC1_OUTPUT_NODES = 16
# 第二层全连接宽度(输出标签类型数) # 第二层全连接宽度(输出标签类型数)
FC2_OUTPUT_NODES = 15 FC2_OUTPUT_NODES = 15
# 输出标签类型数 # 输出标签类型数
OUTPUT_NODES = FC2_OUTPUT_NODES OUTPUT_NODES = FC2_OUTPUT_NODES
def forward(x, regularizer=None): def forward(x, regularizer=None, keep_rate=tf.constant(1.0)):
vars = [] vars = []
nodes = [] nodes = []
@@ -71,16 +72,19 @@ def forward(x, regularizer=None):
pool_shape = pool2.get_shape().as_list() pool_shape = pool2.get_shape().as_list()
node = pool_shape[1] * pool_shape[2] * pool_shape[3] node = pool_shape[1] * pool_shape[2] * pool_shape[3]
reshaped = tf.reshape(pool2, [-1, node]) reshaped = tf.reshape(pool2, [-1, node])
reshaped = tf.nn.dropout(reshaped, keep_rate)
fc1_w = tf.nn.dropout(get_weight([node, FC1_OUTPUT_NODES], regularizer), 0.1) fc1_w = get_weight([node, FC1_OUTPUT_NODES], regularizer)
fc1_b = get_bias([FC1_OUTPUT_NODES]) fc1_b = get_bias([FC1_OUTPUT_NODES])
fc1 = tf.nn.relu(tf.matmul(reshaped, fc1_w) + fc1_b) fc1 = tf.nn.relu(tf.matmul(reshaped, fc1_w) + fc1_b)
fc1 = tf.nn.dropout(fc1, keep_rate)
vars.extend([fc1_w, fc1_b]) vars.extend([fc1_w, fc1_b])
nodes.extend([fc1]) nodes.extend([fc1])
fc2_w = tf.nn.dropout(get_weight([FC1_OUTPUT_NODES, FC2_OUTPUT_NODES], regularizer), 0.1) fc2_w = get_weight([FC1_OUTPUT_NODES, FC2_OUTPUT_NODES], regularizer)
fc2_b = get_bias([FC2_OUTPUT_NODES]) fc2_b = get_bias([FC2_OUTPUT_NODES])
fc2 = tf.nn.softmax(tf.matmul(fc1, fc2_w) + fc2_b) # fc2 = tf.nn.softmax(tf.matmul(fc1, fc2_w) + fc2_b)
fc2 = tf.matmul(fc1, fc2_w) + fc2_b
vars.extend([fc2_w, fc2_b]) vars.extend([fc2_w, fc2_b])
nodes.extend([fc2]) nodes.extend([fc2])

View File

@@ -2,10 +2,8 @@ import numpy as np
import os import os
import cv2 import cv2
import random import random
from forward import OUTPUT_NODES
import sys
import os
from tqdm import tqdm from tqdm import tqdm
from forward import OUTPUT_NODES
# 原图像行数 # 原图像行数
SRC_ROWS = 36 SRC_ROWS = 36
@@ -24,7 +22,7 @@ class DataSet:
self.test_labels = [] self.test_labels = []
self.generate_data_sets(folder) self.generate_data_sets(folder)
def file2nparray(self, name, random=False): def file2nparray(self, name):
image = cv2.imread(name) image = cv2.imread(name)
image = cv2.resize(image, (SRC_COLS, SRC_ROWS)) image = cv2.resize(image, (SRC_COLS, SRC_ROWS))
image = image.astype(np.float32) image = image.astype(np.float32)
@@ -42,16 +40,12 @@ class DataSet:
files = os.listdir(dir) files = os.listdir(dir)
for file in tqdm(files, postfix={"loading id": i}, dynamic_ncols=True): for file in tqdm(files, postfix={"loading id": i}, dynamic_ncols=True):
if file[-3:] == "jpg": if file[-3:] == "jpg":
try:
if random.random() > 0.2: if random.random() > 0.2:
self.train_samples.append(self.file2nparray("%s/%s" % (dir, file))) self.train_samples.append(self.file2nparray("%s/%s" % (dir, file)))
self.train_labels.append(self.id2label(i)) self.train_labels.append(self.id2label(i))
else: else:
self.test_samples.append(self.file2nparray("%s/%s" % (dir, file))) self.test_samples.append(self.file2nparray("%s/%s" % (dir, file)))
self.test_labels.append(self.id2label(i)) self.test_labels.append(self.id2label(i))
except:
print("%s/%s" % (dir, file))
continue
self.train_samples = np.array(self.train_samples) self.train_samples = np.array(self.train_samples)
self.train_labels = np.array(self.train_labels) self.train_labels = np.array(self.train_labels)
self.test_samples = np.array(self.test_samples) self.test_samples = np.array(self.test_samples)
@@ -67,15 +61,6 @@ class DataSet:
labels.append(self.train_labels[id]) labels.append(self.train_labels[id])
return np.array(samples), np.array(labels) return np.array(samples), np.array(labels)
def sample_test_sets(self, length):
samples = []
labels = []
for i in range(length):
id = random.randint(0, len(self.test_samples)-1)
samples.append(self.test_samples[id])
labels.append(self.test_labels[id])
return np.array(samples), np.array(labels)
def all_train_sets(self): def all_train_sets(self):
return self.train_samples[:], self.train_labels[:] return self.train_samples[:], self.train_labels[:]

111
tools/TrainCNN/grab.py Normal file
View File

@@ -0,0 +1,111 @@
#coding=utf-8
import mvsdk
def main():
# 枚举相机
DevList = mvsdk.CameraEnumerateDevice()
nDev = len(DevList)
if nDev < 1:
print("No camera was found!")
return
for i, DevInfo in enumerate(DevList):
print("{}: {} {}".format(i, DevInfo.GetFriendlyName(), DevInfo.GetPortType()))
i = 0 if nDev == 1 else int(input("Select camera: "))
DevInfo = DevList[i]
print(DevInfo)
# 打开相机
hCamera = 0
try:
hCamera = mvsdk.CameraInit(DevInfo, -1, -1)
except mvsdk.CameraException as e:
print("CameraInit Failed({}): {}".format(e.error_code, e.message) )
return
# 获取相机特性描述
cap = mvsdk.CameraGetCapability(hCamera)
PrintCapbility(cap)
# 判断是黑白相机还是彩色相机
monoCamera = (cap.sIspCapacity.bMonoSensor != 0)
# 黑白相机让ISP直接输出MONO数据而不是扩展成R=G=B的24位灰度
if monoCamera:
mvsdk.CameraSetIspOutFormat(hCamera, mvsdk.CAMERA_MEDIA_TYPE_MONO8)
# 相机模式切换成连续采集
mvsdk.CameraSetTriggerMode(hCamera, 0)
# 手动曝光曝光时间30ms
mvsdk.CameraSetAeState(hCamera, 0)
mvsdk.CameraSetExposureTime(hCamera, 30 * 1000)
# 让SDK内部取图线程开始工作
mvsdk.CameraPlay(hCamera)
# 计算RGB buffer所需的大小这里直接按照相机的最大分辨率来分配
FrameBufferSize = cap.sResolutionRange.iWidthMax * cap.sResolutionRange.iHeightMax * (1 if monoCamera else 3)
# 分配RGB buffer用来存放ISP输出的图像
# 备注从相机传输到PC端的是RAW数据在PC端通过软件ISP转为RGB数据如果是黑白相机就不需要转换格式但是ISP还有其它处理所以也需要分配这个buffer
pFrameBuffer = mvsdk.CameraAlignMalloc(FrameBufferSize, 16)
# 从相机取一帧图片
try:
pRawData, FrameHead = mvsdk.CameraGetImageBuffer(hCamera, 2000)
mvsdk.CameraImageProcess(hCamera, pRawData, pFrameBuffer, FrameHead)
mvsdk.CameraReleaseImageBuffer(hCamera, pRawData)
# 此时图片已经存储在pFrameBuffer中对于彩色相机pFrameBuffer=RGB数据黑白相机pFrameBuffer=8位灰度数据
# 该示例中我们只是把图片保存到硬盘文件中
status = mvsdk.CameraSaveImage(hCamera, "./grab.bmp", pFrameBuffer, FrameHead, mvsdk.FILE_BMP, 100)
if status == mvsdk.CAMERA_STATUS_SUCCESS:
print("Save image successfully. image_size = {}X{}".format(FrameHead.iWidth, FrameHead.iHeight) )
else:
print("Save image failed. err={}".format(status) )
except mvsdk.CameraException as e:
print("CameraGetImageBuffer failed({}): {}".format(e.error_code, e.message) )
# 关闭相机
mvsdk.CameraUnInit(hCamera)
# 释放帧缓存
mvsdk.CameraAlignFree(pFrameBuffer)
def PrintCapbility(cap):
for i in range(cap.iTriggerDesc):
desc = cap.pTriggerDesc[i]
print("{}: {}".format(desc.iIndex, desc.GetDescription()) )
for i in range(cap.iImageSizeDesc):
desc = cap.pImageSizeDesc[i]
print("{}: {}".format(desc.iIndex, desc.GetDescription()) )
for i in range(cap.iClrTempDesc):
desc = cap.pClrTempDesc[i]
print("{}: {}".format(desc.iIndex, desc.GetDescription()) )
for i in range(cap.iMediaTypeDesc):
desc = cap.pMediaTypeDesc[i]
print("{}: {}".format(desc.iIndex, desc.GetDescription()) )
for i in range(cap.iFrameSpeedDesc):
desc = cap.pFrameSpeedDesc[i]
print("{}: {}".format(desc.iIndex, desc.GetDescription()) )
for i in range(cap.iPackLenDesc):
desc = cap.pPackLenDesc[i]
print("{}: {}".format(desc.iIndex, desc.GetDescription()) )
for i in range(cap.iPresetLut):
desc = cap.pPresetLutDesc[i]
print("{}: {}".format(desc.iIndex, desc.GetDescription()) )
for i in range(cap.iAeAlmSwDesc):
desc = cap.pAeAlmSwDesc[i]
print("{}: {}".format(desc.iIndex, desc.GetDescription()) )
for i in range(cap.iAeAlmHdDesc):
desc = cap.pAeAlmHdDesc[i]
print("{}: {}".format(desc.iIndex, desc.GetDescription()) )
for i in range(cap.iBayerDecAlmSwDesc):
desc = cap.pBayerDecAlmSwDesc[i]
print("{}: {}".format(desc.iIndex, desc.GetDescription()) )
for i in range(cap.iBayerDecAlmHdDesc):
desc = cap.pBayerDecAlmHdDesc[i]
print("{}: {}".format(desc.iIndex, desc.GetDescription()) )
# Run the grab demo only when executed as a script; importing this module
# (e.g. to reuse PrintCapbility) must not open the camera.
if __name__ == "__main__":
    main()

2344
tools/TrainCNN/mvsdk.py Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -1,9 +1,7 @@
8 6
0.026843265 2.196893
0.13687223 0.07216131
0.355584 0.30069783
-2.171335 -0.4587247
2.0351274 0.25476167
1.8288306 -0.07236218
-4.113487
-4.7374034

File diff suppressed because it is too large Load Diff

View File

@@ -1,17 +1,11 @@
16 10
1.252942 -0.20609309
8.216776 -0.22031759
-0.25801975 0.1367356
0.23331891 0.3687642
-1.0068187 0.41563538
-1.1067235 0.56676525
-0.40771145 -0.18027179
-0.43731463 0.23183917
-1.4359887 -0.42312288
-0.2637226 -0.071102634
-0.41042513
5.653234
11.668375
-1.4110142
-0.1370871
-0.19704156

File diff suppressed because it is too large Load Diff

View File

@@ -1,17 +1,17 @@
16 16
-0.20538531 0.58729273
7.367273 -0.46309644
-0.18452525 -0.16430101
6.532006 0.43460655
25.536476 -0.12165885
-0.18481636 -0.23968913
-7.2863836 2.1033192
-0.106642306 -0.19900312
-13.070918 -0.0075173783
-0.20218277 0.05968375
12.15478 -0.13455966
-0.28686985 -0.203078
-0.0753381 3.4187536
-0.18774705 -0.17911159
-0.45540679 0.2670588
-0.81279093 -0.58640796

File diff suppressed because it is too large Load Diff

View File

@@ -1,12 +1,16 @@
11 15
3.028916 3.3147552
1.399315 0.06590654
12.311913 -0.37396204
1.9181013 -0.22522521
-6.701019 -0.9601034
4.332221 -0.9866448
-1.2238123 -0.091494516
3.367433 0.08088531
-12.5565 -0.87962383
-5.421737 -0.5273214
0.19371712 -0.18194006
-0.035499398
-1.7873636
0.48932117
0.20472674

View File

@@ -1,178 +1,242 @@
16 16
11 15
-6.6445126e-34 0.032564595
-3.249375e-34 0.2617493
1.1159086e-33 -0.32648245
3.8638357e-34 -0.09931708
1.1999902e-33 -0.24592394
-4.773489e-34 -0.046472594
6.573301e-35 0.33926198
-2.7309886e-34 -0.12294629
-3.5826868e-34 0.35003394
-2.058676e-35 -0.21551898
2.9253375e-34 -0.017696692
-0.028950384 -0.14498983
0.6513481 -0.1035376
-0.090478554 0.38845813
-0.20445836 -0.025425002
-0.033577178 0.009146354
0.07797232 -0.10837719
0.54864347 -0.26169685
-0.29817155 0.24757256
-0.0972982 0.12278806
-0.06013593 0.173229
-0.12082546 -0.13405079
3.2078713e-35 -0.12579814
9.031606e-34 -0.055770937
2.9989992e-34 -0.18405183
-1.8143521e-34 0.28358203
-4.3935644e-34 0.07445254
-7.0448736e-34 0.23714246
7.1789805e-35 -0.13335316
-1.0773237e-33 0.010074598
4.1924878e-35 8.8978056e-36
2.6375152e-35 -3.2673058e-35
7.6904e-34 -5.345727e-35
0.7534392 -1.2187582e-35
0.041451767 2.1464323e-35
-0.16353445 6.242724e-35
-0.060047485 -1.5923025e-35
0.00937684 8.5710344e-35
-0.046534266 -1.0859142e-34
0.12362613 3.2036078e-35
-0.15848428 9.889982e-35
-0.24788214 9.5151974e-35
-0.10429883 -2.9864197e-35
0.07533859 4.3109238e-35
-0.059416637 -3.5075268e-35
-0.41185078 0.056317
-0.17300163 -0.041549474
-0.5048911 -0.07867665
-0.27550554 -0.18685594
-0.3164118 0.0036230602
0.41110015 0.26440525
0.5305193 -0.040618088
0.54406047 -0.011269322
0.41247433 -0.037696317
0.37498224 0.01897098
-1.8530121e-34 -0.12073718
9.619048e-34 0.017303245
-9.084177e-35 0.33418366
-6.3287863e-34 0.0023682562
-5.0406337e-34 0.02849121
6.430404e-34 5.652079e-35
3.175955e-34 -2.3405746e-35
4.679148e-34 7.754459e-35
-9.705965e-35 -7.9132345e-35
-1.4937167e-34 -3.652418e-35
-3.7373778e-34 2.7482412e-35
-1.8647024e-34 -5.490258e-35
2.157429e-34 -4.0657551e-35
9.178287e-35 -8.77158e-35
2.0542673e-34 -1.6310674e-35
4.1186567e-34 6.9110407e-35
-1.4028581e-34 2.8374646e-35
-1.9601842e-35 7.249574e-35
1.2199764e-34 -8.326536e-36
6.605314e-36 -4.2208914e-35
4.5839516e-35 3.380163e-07
4.1222883e-34 0.00018250165
-5.649719e-34 -4.091571e-13
6.8534397e-34 -5.742375e-20
1.5284239e-34 1.9097627e-29
4.6217582e-35 -1.0989738e-34
-2.6860813e-34 4.988384e-06
6.4033865e-34 -6.4157313e-25
-1.9073337e-34 0.0046886215
-4.5628154e-34 -4.15727e-06
-2.5596114e-34 8.565781e-06
3.5286568e-34 1.3159001e-08
-4.590898e-34 -6.0661813e-27
-0.016765846 0.003999361
0.011994723 4.6603424e-12
-0.26132298 0.05875436
0.52835166 0.1978433
-0.21429977 0.2357523
0.047839653 0.26039347
0.0091085555 0.29742035
-0.27048072 0.23000301
0.35106397 0.22130986
-0.05962828 0.32969925
-0.06534093 -0.25387922
-5.9855516e-34 -0.21843708
-3.8872762e-34 -0.35505614
1.4836724e-34 -0.18760061
-3.7528057e-34 -0.26636004
9.244409e-35 -0.3437664
3.8288393e-34 -0.31676972
1.7450431e-34 -2.9646424e-21
-2.1571653e-34 1.6212045e-32
-8.635735e-34 -4.972171e-35
-1.1816434e-33 3.321333e-35
2.75913e-34 -3.0660306e-36
-0.11307323 -1.2462985e-35
-0.05993526 -6.15827e-35
-0.13786606 -7.708171e-35
0.0066387164 -8.6527984e-35
0.0024843283 -9.63909e-35
0.59352225 -4.329017e-36
-0.13324556 -1.6798441e-35
-0.275834 6.4576066e-36
-0.13921 1.0103299e-34
-0.023196468 5.888647e-35
0.5097328 -7.013437e-07
-3.878958e-34 -4.0569785e-06
-3.7806562e-34 -1.6326982e-07
-7.8518477e-35 -5.024649e-09
-3.8417675e-35 1.0218174e-08
5.504886e-34 -5.870887e-17
-4.2347166e-34 2.4735778e-05
3.77638e-34 -1.8678125e-28
-6.449212e-34 -5.81377e-18
3.723454e-34 4.2194547e-08
-3.4782797e-34 -8.974654e-09
-7.3213066e-35 -8.819107e-18
-4.4892873e-34 -4.0505355e-36
3.4874208e-34 1.4275389e-15
1.1700748e-33 2.5213077e-35
-1.1355761e-34 0.0455311
1.1225075e-33 -0.16154826
8.598829e-34 -0.12516226
-4.3217242e-35 -0.15351692
-2.7770687e-34 -0.15327676
-4.541627e-34 -0.101601385
2.895937e-34 -0.09675424
5.4065008e-34 -0.009882243
8.211584e-34 0.14380045
2.4092055e-34 0.17609678
-1.1384675e-33 0.15136641
6.7052264e-34 0.18814708
-8.305206e-34 0.14553012
-1.8370869e-34 0.08837449
-5.012333e-34 -0.033248488
9.2541105e-34 -5.5479194e-13
5.402706e-34 -1.301003e-35
-3.0262877e-34 6.407329e-35
7.088514e-34 1.9132001e-35
-1.7485143e-34 9.564731e-15
-2.366834e-34 -4.683806e-19
6.501108e-34 1.8975264e-24
-5.722031e-34 2.1182613e-16
1.1429626e-33 -3.6244807e-35
4.9021696e-35 8.7545505e-28
1.1040688e-34 6.0832183e-21
-1.0464325e-33 -8.545767e-31
1.6525106e-34 5.2584422e-14
-3.9707304e-34 2.1925994e-22
2.1401144e-34 -3.8261837e-20
-0.051736742 -1.0880043e-34
-0.042417962 -6.6652585e-35
0.051013805 -6.389439e-35
0.16345194 3.2505208e-35
0.5187456 -4.973718e-35
-0.14417858 -3.3143227e-35
-0.0539816 -8.6873607e-35
-0.15638705 -7.993331e-35
-0.30926377 -7.852833e-36
0.42976364 -6.3270696e-35
-0.029886993 5.258114e-35
2.1151958e-35
7.3324824e-35
7.1793427e-35
-9.0051764e-35
0.3847243
-0.008498104
0.030268772
-0.13264672
0.030948505
-0.07938414
-0.04668712
-0.16404602
0.07313376
0.1522345
0.00048681977
0.01413009
-0.09327267
-0.055540953
-0.100918815
-0.06997617
-0.100417346
-0.11185439
-0.108010836
-0.1542093
-0.026418801
-0.0976361
0.2631115
0.110037416
-0.038920112
0.03310242
-0.07849237
0.087744445
-0.016706102
0.42764086
-0.063509755
-0.20127158
0.3405362
0.10242782
-0.25828084
-0.18461828
0.18166192
-0.13771532
-0.14198124
0.4270196
0.16850737
-0.13088605
-0.18872121
0.22758731
-0.07991603
-0.06114433
0.22688313
0.013428835
-0.12416983
0.32349384
-0.081210054
-0.33148897
-0.1045747
0.20436902
0.018065251
-0.15008682
0.3795789
-0.022265602
-0.2928385
0.012199368