// ocr_test/bktree.h + ocr_test/bktree.cpp — BK-tree for approximate
// (edit-distance) string lookup, reconstructed from a mangled patch.
// (The same patch also added ocr_test/ReadMe.txt: a Visual Studio
// app-wizard project description for the `ocr_test` console app, noting
// libjpeg.lib / libpng.lib / zlib.lib as additional link inputs.)
//
// Storage model:
//   * All words live packed in one byte buffer `strings`, each record as
//     [len][bytes...]['\0']; a record is addressed by its byte offset.
//   * All nodes live in one contiguous array `tree`; children are stored
//     as array *indices* in `next[]` (0 == "no child"), so the pool can
//     be reallocated without fixing up links.

#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <string>
#include <vector>

#define BKTREE_STRINGS_SIZE 4096
// Initial node-pool size in bytes; the pool now grows geometrically on
// demand (see write_new_record). The original value was 1147483648 — a
// ~1.1 GB up-front malloc for a pool that was never grown nor
// bounds-checked, so inserts past capacity wrote out of bounds.
#define BKTREE_TREE_SIZE 65536

// Maximum word length, and therefore the fan-out of next[]: a valid
// child slot is a distance in [1, BKTREE_STRING_MAX - 1].
#define BKTREE_STRING_MAX 24

#define BKTREE_OK 0
#define BKTREE_FAIL 1

// Address a stored word / its length byte from a record offset.
#define BKTREE_GET_STRING(bktree, string_offset) ((bktree)->strings + (string_offset) + 1)
#define BKTREE_GET_STRING_LEN(bktree, string_offset) (*((bktree)->strings + (string_offset)))

typedef struct {
    long string_offset;           // offset of this node's word in BKTree::strings
    int next[BKTREE_STRING_MAX];  // child node index per distance; 0 = no child
} BKNode;

typedef struct {
    int size;             // number of nodes in the tree

    BKNode * tree;        // node pool; index 0 is the root
    BKNode * tree_cursor; // next free node slot
    size_t tree_size;     // pool capacity in bytes

    char * strings;       // packed word storage
    char * strings_cursor;// next free byte in `strings`
    size_t strings_size;  // capacity of `strings` in bytes

    // metric: (word1, len1, word2, len2, max) -> distance; must satisfy
    // the triangle inequality for BK-tree pruning to be correct.
    int (* distance)(char *, int, char *, int, int);
} BKTree;

struct BKResult_s {
    int distance;    // metric distance from the query word
    std::string str; // the matched word
};
typedef struct BKResult_s BKResult;

static int write_string(BKTree * bktree, char * string, unsigned char len);
static BKNode * write_new_record(BKTree * bktree, char * string, unsigned char len);

/// Allocate an empty BK-tree using `distance` as its metric.
/// Returns NULL on allocation failure (the original dereferenced
/// unchecked malloc results).
BKTree * bktree_new(int (* distance)(char *, int, char *, int, int)) {
    BKTree * bktree = (BKTree *)malloc(sizeof(BKTree));
    if (bktree == NULL)
        return NULL;

    bktree->tree_size = BKTREE_TREE_SIZE;
    bktree->tree = (BKNode *)malloc(bktree->tree_size);
    bktree->strings_size = BKTREE_STRINGS_SIZE;
    bktree->strings = (char *)malloc(bktree->strings_size);
    if (bktree->tree == NULL || bktree->strings == NULL) {
        free(bktree->tree);
        free(bktree->strings);
        free(bktree);
        return NULL;
    }

    bktree->tree_cursor = bktree->tree;
    bktree->strings_cursor = bktree->strings;
    bktree->size = 0;
    bktree->distance = distance;
    return bktree;
}

/// Release all memory owned by the tree.
void bktree_destroy(BKTree * bktree) {
    if (bktree == NULL)
        return;
    free(bktree->tree);
    free(bktree->strings);
    free(bktree);
}

/// Insert `string` (length `len`, 1..BKTREE_STRING_MAX) into the tree.
/// Returns the node holding the word — the existing node if the word was
/// already present (the original returned BKTREE_OK, i.e. a NULL pointer,
/// making a duplicate indistinguishable from a failure) — or NULL on
/// invalid input / allocation failure.
BKNode * bktree_add(BKTree * bktree, char * string, unsigned char len) {
    if (len > BKTREE_STRING_MAX || len == 0)
        return NULL;

    if (bktree->size == 0)
        return write_new_record(bktree, string, len);

    // Walk by offset, not pointer: write_new_record may realloc the pool.
    long node_off = 0; // root
    for (;;) {
        BKNode * node = bktree->tree + node_off;
        char * node_str = BKTREE_GET_STRING(bktree, node->string_offset);
        int node_str_len = BKTREE_GET_STRING_LEN(bktree, node->string_offset);

        int d = bktree->distance(node_str, node_str_len, string, len, -1);

        if (d == 0)
            return node; // word already stored here

        if (d < 0)
            return NULL; // defensive: metric must be non-negative
        // Clamp into next[]'s range; the original indexed next[d] raw,
        // so d == BKTREE_STRING_MAX wrote one past the array.
        if (d >= BKTREE_STRING_MAX)
            d = BKTREE_STRING_MAX - 1;

        if (node->next[d] > 0) {
            node_off = node->next[d];
        } else {
            BKNode * new_node = write_new_record(bktree, string, len);
            if (new_node == NULL)
                return NULL;
            // Re-index through the (possibly reallocated) pool.
            bktree->tree[node_off].next[d] = (int)(new_node - bktree->tree);
            return new_node;
        }
    }
}

/// Recursive worker for bktree_query: collect every stored word within
/// `max` of `string` into `res`, pruning subtrees by the triangle
/// inequality (only child distances in [d - max, d + max + 1] can match).
void inner_query(BKTree * bktree, BKNode * node, char * string, unsigned char len, int max, std::vector<BKResult>& res) {

    int d = bktree->distance(BKTREE_GET_STRING(bktree, node->string_offset),
                             BKTREE_GET_STRING_LEN(bktree, node->string_offset),
                             string, len, -1);

    // Child-slot range to visit (original kept the +1 slack on the top end).
    int lo = (d - max < 1) ? 1 : d - max;
    int hi = d + max + 1;
    if (hi >= BKTREE_STRING_MAX)
        hi = BKTREE_STRING_MAX - 1;

    if (d <= max) {
        BKResult r;
        r.distance = d;
        int word_len = (unsigned char)bktree->strings[node->string_offset];
        char * word = bktree->strings + node->string_offset + 1;
        r.str = std::string(word, word + word_len);
        res.push_back(r);
    }

    for (int i = lo; i <= hi; i++) {
        if (node->next[i] > 0)
            inner_query(bktree, bktree->tree + node->next[i], string, len, max, res);
    }
}

/// Return every stored word whose distance from `string` is <= max.
/// An empty tree now yields an empty result (the original walked the
/// uninitialized root node when size == 0).
std::vector<BKResult> bktree_query(BKTree * bktree, char * string, unsigned char len, int max) {
    std::vector<BKResult> res;
    if (bktree != NULL && bktree->size > 0)
        inner_query(bktree, bktree->tree, string, len, max, res);
    return res;
}

/// Debug dump of one node: its word, pool index, and child index table.
void bktree_node_print(BKTree * bktree, BKNode * node) {
    if (bktree == NULL) {
        printf("bktree is null\n");
        return;
    }
    if (node == NULL) {
        printf("node is null\n");
        return;
    }

    printf("String: %s\n", BKTREE_GET_STRING(bktree, node->string_offset));
    printf("Offset: %ld\n", (long)(node - bktree->tree));
    for (int i = 0; i < BKTREE_STRING_MAX; i++)
        printf("%d ", node->next[i]);
    printf("\n");
}

/// Append [len][bytes]['\0'] to the string buffer, growing it as needed.
/// Returns the record's byte offset, or -1 on allocation failure.
static int write_string(BKTree * bktree, char * string, unsigned char len) {
    size_t used = (size_t)(bktree->strings_cursor - bktree->strings);

    while (used + len + 2 >= bktree->strings_size) {
        // realloc replaces the original's malloc+memcpy+free dance.
        char * grown = (char *)realloc(bktree->strings, bktree->strings_size * 2);
        if (grown == NULL)
            return -1;
        bktree->strings = grown;
        bktree->strings_size *= 2;
        bktree->strings_cursor = bktree->strings + used;
    }

    int original_offset = (int)used;
    *(bktree->strings_cursor) = (char)len;
    memcpy(bktree->strings_cursor + 1, string, len);
    *(bktree->strings_cursor + len + 1) = '\0';
    bktree->strings_cursor += len + 2;

    return original_offset;
}

/// Claim the next node slot (growing the pool if full), store the word,
/// and zero the child table. Returns NULL on allocation failure.
static BKNode * write_new_record(BKTree * bktree, char * string, unsigned char len) {
    size_t used_nodes = (size_t)(bktree->tree_cursor - bktree->tree);
    if ((used_nodes + 1) * sizeof(BKNode) > bktree->tree_size) {
        // The original never grew the pool: inserts past capacity were
        // out-of-bounds writes.
        BKNode * grown = (BKNode *)realloc(bktree->tree, bktree->tree_size * 2);
        if (grown == NULL)
            return NULL;
        bktree->tree = grown;
        bktree->tree_size *= 2;
        bktree->tree_cursor = bktree->tree + used_nodes;
    }

    int off = write_string(bktree, string, len);
    if (off < 0)
        return NULL;

    BKNode * node = bktree->tree_cursor++;
    node->string_offset = off;
    for (int i = 0; i < BKTREE_STRING_MAX; i++)
        node->next[i] = 0;

    bktree->size++;
    return node;
}
+1,1824 @@ +#pragma once + +#define _CRT_SECURE_NO_WARNINGS +#define OPENCV +#include + + + + +#include "parser.h" + + +#include"opencv/cv.h" +#include"opencv/highgui.h" +#include"opencv/cxcore.h" + + + +#include"math.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +using namespace caffe; // NOLINT(build/namespaces) +using std::string; + + + +using namespace cv; +using namespace std; + + + + +#define showSteps 0 +#define showdemo 1 +#define USE_OPENCV + + + + +int notfound = 2; +int readtype = 0;//1Ϊ2Ϊַ + +CascadeClassifier car_cascade; +CascadeClassifier char_cascade; +vector file_name, img_path; +vector imgfileNames; + + + +void FastFilter(IplImage *img, double sigma) +{ + int filter_size; + + // Reject unreasonable demands + if (sigma > 200) sigma = 200; + + // get needed filter size (enforce oddness) + filter_size = (int)floor(sigma * 6) / 2; + filter_size = filter_size * 2 + 1; + + // If 3 sigma is less than a pixel, why bother (ie sigma < 2/3) + if (filter_size < 3) return; + + // Filter, or downsample and recurse + /*if (filter_size < 10) { + + #ifdef USE_EXACT_SIGMA + FilterGaussian(img, sigma) + #else + cvSmooth( img, img, CV_GAUSSIAN, filter_size, filter_size ); + #endif + + } + else*/ { + if (img->width < 2 || img->height < 2) return; + + IplImage* sub_img = cvCreateImage(cvSize(img->width / 2, img->height / 2), img->depth, img->nChannels); + + cvPyrDown(img, sub_img); + + FastFilter(sub_img, sigma / 2.0); + + cvResize(sub_img, img, CV_INTER_LINEAR); + + cvReleaseImage(&sub_img); + } + +} + +void MultiScaleRetinex(IplImage *img, int scales, double *weights, double *sigmas, int gain, int offset) +{ + int i; + double weight; + IplImage *A, *fA, *fB, *fC; + + // Initialize temp images + fA = cvCreateImage(cvSize(img->width, img->height), IPL_DEPTH_32F, img->nChannels); + fB = cvCreateImage(cvSize(img->width, img->height), IPL_DEPTH_32F, img->nChannels); + fC = 
cvCreateImage(cvSize(img->width, img->height), IPL_DEPTH_32F, img->nChannels); + + + // Compute log image + cvConvert(img, fA); + cvLog(fA, fB); + + // Normalize according to given weights + for (i = 0, weight = 0; i < scales; i++) + weight += weights[i]; + + if (weight != 1.0) cvScale(fB, fB, weight); + + // Filter at each scale + for (i = 0; i < scales; i++) { + A = cvCloneImage(img); + FastFilter(A, sigmas[i]); + + cvConvert(A, fA); + cvLog(fA, fC); + cvReleaseImage(&A); + + // Compute weighted difference + cvScale(fC, fC, weights[i]); + cvSub(fB, fC, fB); + } + + // Restore + cvConvertScale(fB, img, gain, offset); + + // Release temp images + cvReleaseImage(&fA); + cvReleaseImage(&fB); + cvReleaseImage(&fC); +} + + + + +int init_detect(CascadeClassifier &car_cascade, CascadeClassifier &char_cascade) { + + + readtype = 2; + if (!char_cascade.load("cascade12.xml")) + { + cerr << "ERROR: Could not load classifier cascade" << endl; + return -1; + } + + readtype = 1; + if (!car_cascade.load("cascade_11_plane_card_20160912.xml")) + { + cerr << "ERROR: Could not load classifier cascade" << endl; + return -1; + } +} + +void InsertSort(int a[], int count) +{ + int i, j, temp; + for (i = 1; itemp && j >= 0) + { + a[j + 1] = a[j]; + j--; + } + if (j != (i - 1)) + a[j + 1] = temp; + } +} + +int SizeOfRect(const CvRect& rect) // +{ + return rect.height*rect.width; +} + +CvRect IntersectRect(CvRect result_i, CvRect resmax_j) { // + CvRect rectInter; + + + rectInter.x = max(result_i.x, resmax_j.x); + rectInter.y = max(result_i.y, resmax_j.y); + + int xxx = min((result_i.x + result_i.width), (resmax_j.x + resmax_j.width)); + int yyy = min((result_i.y + result_i.height), (resmax_j.y + resmax_j.height)); + + rectInter.width = xxx - rectInter.x; + rectInter.height = yyy - rectInter.y; + + return rectInter; +} + +CvRect UnionRect(CvRect resmax_j, CvRect result_i) { // + CvRect resmax_jj; + + resmax_jj.x = min(result_i.x, resmax_j.x); + resmax_jj.y = min(result_i.y, resmax_j.y); + 
+ int xxx = max((result_i.x + result_i.width), (resmax_j.x + resmax_j.width)); + int yyy = max((result_i.y + result_i.height), (resmax_j.y + resmax_j.height)); + + resmax_jj.width = xxx - resmax_j.x; + resmax_jj.height = yyy - resmax_j.y; + + return resmax_jj; + +} + +vector roichoose(vectordetectROI, Mat choose_detect_obj) { + /***************δɸѡ*******************/ + int image_width = choose_detect_obj.cols; + int image_height = choose_detect_obj.rows; + int judgeheight = image_height*0.3; + vector> b;//ƵȼĴ洢λ + vector d;//Ƽ洢λ + // vector shanchu;//¼Ҫɾĵ + for (int i = detectROI.size() - 1; i > -1; i--) + { + if (detectROI[i].height < judgeheight) + // shanchu.push_back(i); + detectROI.erase(detectROI.begin() + i); + } + + /*******************************/ + if (showSteps) { + for (int i = 0; i < detectROI.size(); i++) { + printf("x: %d \n", detectROI[i].x); + } + } + for (int i = 0; i < detectROI.size(); i++)//ѭiָʼ㣬j + { + if (detectROI[i].height < judgeheight) + continue; + for (int j = 10; j < (image_width / 7); j++)//jΪһΧڵĿ + { + /*************/ + + + + /*************/ + + int k = 1;//k + int startpoint = detectROI[i].x; + int startpoint_y = detectROI[i].y; + int min_distance = 2 * image_width; //¼λõļ + int bestpoint = detectROI.size() + 1; //¼ѵ + vector grouppoint;//ÿһѵĴ洢λ + grouppoint.push_back(detectROI[i]); + + for (int ii = i + 1; ii < detectROI.size(); ii++)//Ѱڵŵ + { + if (detectROI[ii].height < judgeheight) + continue; + int distance_x = abs(j + startpoint - detectROI[ii].x); + if (distance_x < 0.5*j) //ѡȡxֵʵĵ㣬ȽλõľѡС + { + int distance = (distance_x)*(distance_x)+0.7*(detectROI[ii].y - startpoint_y)*(detectROI[ii].y - startpoint_y);//Ǹx꣬yIJֵӰȨؽ + distance = sqrt(distance); + if (distance < min_distance) + { + min_distance = distance; + bestpoint = ii; + + } + } + } + /*************/ + // printf("min_distance=%d", min_distance); + + /*******************************************4-10޸ ںĿyֵԤλbestpoint_y*************************************/ + while (bestpoint 
< (detectROI.size() + 1) && min_distance < 2 * j) + { + k++; + grouppoint.push_back(detectROI[bestpoint]); + startpoint = detectROI[bestpoint].x;//ʼλ + int startpoint_y2 = detectROI[bestpoint].y;//y2 + min_distance = image_width; + bestpoint = detectROI.size() + 1; + + for (int ii = i + 1; ii < detectROI.size(); ii++)//Ѱڵŵ + { + int distance_x = abs(j + startpoint - detectROI[ii].x); + int best_y = 2 * startpoint_y2 - startpoint_y;//еy + int distance_y = abs(detectROI[ii].y - best_y); + + if (distance_x < 0.5*j) //ѡȡxֵʵĵ㣬ȽλõľѡС + { + int distance = (distance_x)*(distance_x)+0.7*distance_y*distance_y;//Ǹx꣬yIJֵӰȨؽ + distance = sqrt(distance); + if (distance < min_distance) + { + min_distance = distance; + bestpoint = ii; + + + } + } + } + startpoint_y = startpoint_y2;//y1 + + } + + + + + if (grouppoint.size() > 3) + { + /*if (showSteps) + { + for (int testshow = 0; testshow < grouppoint.size(); testshow++){ + printf("x=%d y=%d width=%d height=%d \n", grouppoint[testshow].x, grouppoint[testshow].y, grouppoint[testshow].width, grouppoint[testshow].height); + } + getchar(); + }*/ + + b.push_back(grouppoint); + d.push_back(j); + } + + } + } + + + //λƵȼ򣬽ԱȽѡŽ + + double sum_min = 100000; + int bestgroup = 0; + if (b.size() > 2) + { + for (int i = 0; i < b.size(); i++) //ÿһsumֵ,ѡbestgroup + { + double sum = 0; + + for (int j = 1; j < b[i].size(); j++) + { + int dis = d[i]; + sum = sum + (b[i][j].x - b[i][j - 1].x - dis)*(b[i][j].x - b[i][j - 1].x - dis) + 4 * (b[i][j].y - b[i][j - 1].y)*(b[i][j].y - b[i][j - 1].y) + 0.5*(b[i][j].height - b[i][j - 1].height)*(b[i][j].height - b[i][j - 1].height) + 0.5*(b[i][j].width - b[i][j - 1].width)*(b[i][j].width - b[i][j - 1].width); + } + sum = sum / b[i].size(); + sum = sum / (1 + pow(b[i].size(), 2)); //н + + // printf("b[i].size=%d sum=%f \n", b[i].size(),sum); + if (sum < sum_min) + { + sum_min = sum; + bestgroup = i; + } + } + // printf("size=%d distacne=%d sum_min=%f \n", b[bestgroup].size(), d[bestgroup], sum_min); + // return 
b[bestgroup]; + /*****************bestgroupĻҵǰĿ********/ + int bestnumber = b[bestgroup].size(); + int bestx0 = b[bestgroup][0].x;//յ + int besty0 = b[bestgroup][0].y; + int bestx1 = b[bestgroup][bestnumber - 1].x; + int besty1 = b[bestgroup][bestnumber - 1].y; + int bestheight = 0; + int bestw = 0; + for (int i = 0; i < bestnumber; i++) { + bestheight = bestheight + b[bestgroup][0].height; + bestw = bestw + b[bestgroup][0].width; + } + + bestheight = bestheight / bestnumber; + bestw = bestw / bestnumber; + int bestwidth = d[bestgroup]; + /********************************ǰ*****************************/ + int search_startpoint = bestx0; + int search_starty = besty0; + int searchpoint = detectROI.size() + 1; + int search_min_distance = image_width; + int presize = b[bestgroup].size(); + + for (int i = 0; i < detectROI.size(); i++) + { + int distance_x = search_startpoint - bestwidth - detectROI[i].x; + int distance_y = abs(search_starty - detectROI[i].y); + /*******************************/ + + /* if (distance_x>-0.8*bestwidth && distance_x < 1.8*bestwidth){ + printf("bestwidth=%d bestheight=%d bestw=%d \n", bestwidth, bestheight, bestw); + printf("distance_x=%d x=%d \n", distance_x, detectROI[i].x); + printf("width=%d height=%d \n", detectROI[i].width, detectROI[i].height); + }*/ + + + /*******************************/ + if (distance_x>-0.6*bestwidth && distance_x < 1.8*bestwidth && detectROI[i].height > 0.7*bestheight && detectROI[i].height<1.4*bestheight && detectROI[i].width > 0.7*bestw && detectROI[i].width < 1.4*bestw && distance_y<0.4*bestheight) //ѡȡxֵʵĵ㣬ȽλõľѡСģ߶ȿȵɸѡ + { + int distance = (distance_x)*(distance_x)+0.7*(besty0 - detectROI[i].y)*(besty0 - detectROI[i].y);//Ǹx꣬yIJֵӰȨؽ + distance = sqrt(distance); + if (distance < search_min_distance) + { + search_min_distance = distance; + searchpoint = i; + + } + } + } + + + while (searchpoint<(detectROI.size() + 1) && search_min_distance<2 * bestwidth) + { + + b[bestgroup].insert(b[bestgroup].begin(), 
detectROI[searchpoint]); + search_startpoint = detectROI[searchpoint].x;//ʼλ + search_starty = detectROI[searchpoint].y; + search_min_distance = image_width; + searchpoint = detectROI.size() + 1; + + for (int i = 0; i < detectROI.size(); i++)//Ѱڵŵ + { + int distance_x = search_startpoint - bestwidth - detectROI[i].x; + int distance_y = abs(search_starty - detectROI[i].y); + /*******************************/ + /*if (distance_x>-0.8*bestwidth && distance_x < 1.8*bestwidth && detectROI[i].height > 0.7*bestheight && detectROI[i].height<1.6*bestheight){ + printf("bestwidth=%d bestw=%d bestheight=%d \n", bestwidth, bestw,bestheight); + printf("x=%d distance_x=%d width=%d height=%d \n", detectROI[i].x, distance_x, detectROI[i].width, detectROI[i].height); + }*/ + + /*******************************/ + if (distance_x>-0.6*bestwidth && distance_x < 1.8*bestwidth && detectROI[i].height > 0.7*bestheight && detectROI[i].height<1.4*bestheight && detectROI[i].width > 0.7*bestw && detectROI[i].width < 1.4*bestw && distance_y<0.4*bestheight) //ѡȡxֵʵĵ㣬ȽλõľѡС + { + + int distance = (distance_x)*(distance_x)+0.7*(besty0 - detectROI[i].y)*(besty0 - detectROI[i].y);//Ǹx꣬yIJֵӰȨؽ + distance = sqrt(distance); + // printf("distance=%d \n", distance); + if (distance < search_min_distance) + { + search_min_distance = distance; + searchpoint = i; + if (showSteps) + { + if (search_min_distance < 2 * bestwidth) + printf("search_min_distance=%d \n", search_min_distance); + } + } + } + } + } + + + /********************************Ӻ*****************************/ + search_startpoint = bestx1; + search_starty = besty1; + searchpoint = detectROI.size() + 1; + search_min_distance = image_width; + + + for (int i = 0; i < detectROI.size(); i++) + { + int distance_x = detectROI[i].x - search_startpoint - bestwidth; + int distance_y = abs(search_starty - detectROI[i].y); + /*******************************/ + + /* if (distance_x>-0.8*bestwidth && distance_x < 1.8*bestwidth){ + printf("bestwidth=%d 
bestheight=%d bestw=%d \n", bestwidth, bestheight, bestw); + printf("distance_x=%d x=%d \n", distance_x, detectROI[i].x); + printf("width=%d height=%d \n", detectROI[i].width, detectROI[i].height); + }*/ + + + /*******************************/ + if (distance_x>-0.6*bestwidth && distance_x < 1.8*bestwidth && detectROI[i].height > 0.7*bestheight && detectROI[i].height<1.4*bestheight && detectROI[i].width > 0.7*bestw && detectROI[i].width < 1.4*bestw && distance_y<0.4*bestheight) //ѡȡxֵʵĵ㣬ȽλõľѡСģ߶ȿȵɸѡ + { + int distance = (distance_x)*(distance_x)+0.7*(besty1 - detectROI[i].y)*(besty1 - detectROI[i].y);//Ǹx꣬yIJֵӰȨؽ + distance = sqrt(distance); + if (distance < search_min_distance) + { + search_min_distance = distance; + searchpoint = i; + + } + } + } + + + while (searchpoint<(detectROI.size() + 1) && search_min_distance<2 * bestwidth) + { + + b[bestgroup].push_back(detectROI[searchpoint]); + search_startpoint = detectROI[searchpoint].x;//ʼλ + search_starty = detectROI[searchpoint].y; + search_min_distance = image_width; + searchpoint = detectROI.size() + 1; + + for (int i = 0; i < detectROI.size(); i++)//Ѱڵŵ + { + int distance_x = detectROI[i].x - search_startpoint - bestwidth; + int distance_y = abs(search_starty - detectROI[i].y); + /*******************************/ + /*if (distance_x < 0.8*bestwidth) + printf("x=%d width=%d height=%d ", detectROI[i].x, detectROI[i].width, detectROI[i].height);*/ + /*******************************/ + if (distance_x>-0.6*bestwidth && distance_x < 1.8*bestwidth && detectROI[i].height > 0.7*bestheight && detectROI[i].height<1.4*bestheight && detectROI[i].width > 0.7*bestwidth && detectROI[i].width < 1.4*bestwidth && distance_y<0.4*bestheight) //ѡȡxֵʵĵ㣬ȽλõľѡС + { + int distance = (distance_x)*(distance_x)+0.7*(besty1 - detectROI[i].y)*(besty1 - detectROI[i].y);//Ǹx꣬yIJֵӰȨؽ + distance = sqrt(distance); + // printf("distance=%d \n", distance); + if (distance < search_min_distance) + { + search_min_distance = distance; + searchpoint = i; + 
if (showSteps) + { + if (search_min_distance < 0.8*bestwidth) + printf("search_min_distance=%d \n", search_min_distance); + } + } + } + } + } + /*****************bestgroupĻҵǰĿ********/ + + /********************ѡbestgroupдڰϵĿ*********************/ + vector baohan; + for (int i = 0; i < b[bestgroup].size(); i++) + for (int j = 0; j < b[bestgroup].size(); j++) + { + CvRect recti = b[bestgroup][i]; + CvRect rectj = b[bestgroup][j]; + /***********vectorƳԪ*************/ + if (rectj.x >= recti.x && rectj.y >= recti.y && rectj.x + rectj.width < recti.x + recti.width && rectj.y + rectj.height < recti.y + recti.height) + { + baohan.push_back(j); + } + } + vector group;//ȥϵbestgroup + for (int i = 0; i < b[bestgroup].size(); i++) { + int kkk = 0;//kkkΪ0˵򲻴ڱϵ + for (int j = 0; j < baohan.size(); j++) + { + if (i == baohan[j]) + kkk++; + } + if (kkk == 0) + group.push_back(b[bestgroup][i]); + } + + + + + + + + int finalsize = b[bestgroup].size(); + if (showSteps) { + printf("bestx:\n"); + for (int i = 0; i < b[bestgroup].size(); i++) + printf("%d \n", b[bestgroup][i].x); + printf("add=%d \n", (finalsize - presize)); + } + // return b[bestgroup]; + return group; + + } + else + { + printf(" not found \n"); + notfound = 0; + + return detectROI; + } + +} + +vector roinormalization(vectorchooseROI, Mat normalization_detect_obj) +{ + vector normal; + int image_width = normalization_detect_obj.cols; + int image_height = normalization_detect_obj.rows; + int width_average = 0; + int height_average = 0; + if (chooseROI.size() > 1) { + for (int i = 0; i < chooseROI.size(); i++) + { + width_average += chooseROI[i].width; + height_average += chooseROI[i].height; + } + width_average = width_average / chooseROI.size(); + height_average = height_average / chooseROI.size(); + if (width_average * 5>height_average * 3) + height_average = width_average * 5 / 3; + else + width_average = height_average * 3 / 5; + for (int i = 0; i < chooseROI.size(); i++) + { + CvRect roi_normal; + roi_normal.x = 
chooseROI[i].x + 0.5*chooseROI[i].width - 0.5*width_average; + if (roi_normal.x < 0) + roi_normal.x = 0; + roi_normal.y = chooseROI[i].y + 0.5*chooseROI[i].height - 0.5*height_average; + if (roi_normal.y < 0) + roi_normal.y = 0; + roi_normal.width = width_average; + if (roi_normal.width + roi_normal.x>image_width) + roi_normal.width = image_width - roi_normal.x; + roi_normal.height = height_average; + if (roi_normal.height + roi_normal.y>image_height) + roi_normal.height = image_height - roi_normal.y; + normal.push_back(roi_normal); + } + return normal; + } + else + return chooseROI; +} + +vector roicomplete(vectorroinormalization, Mat normalization_detect_obj) +{ + int image_width = normalization_detect_obj.cols; + int image_height = normalization_detect_obj.rows; + if (roinormalization.size() > 1) + { + int roiwidth = roinormalization[0].width; + int roiheight = roinormalization[0].height; + vector com;//comΪСʱĴ洢 + for (int i = 0; i < roinormalization.size(); i++) + { + com.push_back(roinormalization[i]); + } + + if (showSteps) + cout << "βм䲹ǰ⵽ַΪ" << roinormalization.size() << endl; + int sum = 0; + int avg_distance_of_chars = 0; + if (roinormalization.size() > 2) { + int numSum = 0; + + for (int i = 0; i 0.1*roiwidth)) + { + int j = (distance + 0.2*roiwidth) / (0.8 * roiwidth);//0.6-1.4ʱһλ1.4-2.2λ + if (showSteps) { + cout << roinormalization[i].x - roinormalization[i - 1].x - roiwidth << " " << (0.6 * roiwidth) << " " << j << endl; + cout << (roinormalization[i].x - roinormalization[i - 1].x) % cvRound(0.6*(roiwidth + 2 * avg_distance_of_chars)) << " " + << cvRound(0.6*(roiwidth + 2 * avg_distance_of_chars)) << " " << (roiwidth + 2 * avg_distance_of_chars) << endl; + } + if ((distance > 0.6*(roiwidth + 1.5 * avg_distance_of_chars)) && (j == 0))//һ˵ƽΪ-0.2w,ǰſ볬0.3wʱԲһλ + { + j++; + } + if (showSteps) { + cout << "add 1:" << j << " CvRect" << endl; + } + + for (int n = 0; n < j; n++) { //j = 1϶ĵ + + CvPoint centerP; + centerP.x = roinormalization[i].x - (2 * (j - 
n) - 1) * (roinormalization[i].x - (roinormalization[i - 1].x + roiwidth)) / (2 * j); + centerP.y = (roinormalization[i].y + roinormalization[i - 1].y) / 2; + + CvRect Roi; + Roi.x = centerP.x - roiwidth / 2; + Roi.y = centerP.y; + Roi.width = roiwidth; + Roi.height = roiheight; + + com.push_back(Roi); + } + + } + + else if ((roinormalization[i].x - 2.7*(roiwidth + avg_distance_of_chars) > roinormalization[0].x) && (distance > 0.01*roiwidth))//һȽԶҲǿ4λ + { + //int j = (roinormalization[i].x - roinormalization[i - 1].x - roiwidth - 2 * avg_distance_of_chars) / (0.6 * roiwidth);//һ㷨 + int j = (roinormalization[i].x - roinormalization[i - 1].x - 0.5*roiwidth) / (roiwidth + avg_distance_of_chars);//һ㷨 + //int j = (distance + 0.6*roiwidth) / (0.9 * roiwidth); + if (showSteps) { + cout << roinormalization[i].x - roinormalization[i - 1].x - roiwidth - 2 * avg_distance_of_chars << " " << (0.6 * roiwidth) << " " << j << endl; + cout << (roinormalization[i].x - roinormalization[i - 1].x) % cvRound(0.8*(roiwidth + 2 * avg_distance_of_chars)) << " " + << cvRound(0.6*(roiwidth + 2 * avg_distance_of_chars)) << " " << (roiwidth + 2 * avg_distance_of_chars) << endl; + } + if (((roinormalization[i].x - roinormalization[i - 1].x - roiwidth) > 0.6*(roiwidth + 2 * avg_distance_of_chars)) && (j == 0)) { + j++; + } + if (showSteps) { + cout << "add 2:" << j << " CvRect" << endl; + } + + for (int n = 0; n < j; n++) { //j = 1϶ĵ + + CvPoint centerP; + centerP.x = roinormalization[i].x - (2 * (j - n) - 1) * (roinormalization[i].x - (roinormalization[i - 1].x + roiwidth)) / (2 * j); + centerP.y = (roinormalization[i].y + roinormalization[i - 1].y) / 2; + + CvRect Roi; + Roi.x = centerP.x - roiwidth / 2; + Roi.y = centerP.y; + Roi.width = roiwidth; + Roi.height = roiheight; + + com.push_back(Roi); + } + } + } + if (com.size() == 6)//ǰ + { + int size = com.size(); + CvRect front, front2;//front2frontǰ棨бҪӵĻ + CvRect behind, behind2;//behind2behind棨бҪӵĻ + if (roinormalization[1].x - 
roinormalization[0].x > roiwidth)//ǰһ͵ڶ + { + if (roinormalization[0].x - 0.8*roiwidth > 0) + front.x = roinormalization[0].x - 0.8*roiwidth; + else + front.x = 0; + front.y = 2 * roinormalization[0].y - roinormalization[1].y; + if (front.y < 0) + front.y = 0; + if (front.y + roiheight>image_height) + front.y = image_height - roiheight; + front.width = roiwidth; + front.height = roiheight; + if (roinormalization[0].x - front.x > 0.6*roiwidth) + { + com.insert(com.begin(), front);//ӵһ ۲Ƿӵڶ + + } + } + else // + { + int ddd = roinormalization[1].x - roinormalization[0].x; + if (ddd > 0.8*roiwidth) + ddd = 0.8*roiwidth; + if (roinormalization[0].x - ddd > 0) + front.x = roinormalization[0].x - ddd; + else + front.x = 0; + front.y = 2 * roinormalization[0].y - roinormalization[1].y; + if (front.y < 0) + front.y = 0; + if (front.y + roiheight>image_height) + front.y = image_height - roiheight; + front.width = roiwidth; + front.height = roiheight; + if (roinormalization[0].x - front.x>0.6*ddd) + com.insert(com.begin(), front); + } + if (roinormalization[roinormalization.size() - 1].x - roinormalization[roinormalization.size() - 2].x > roiwidth)//һ͵ڶĿ + { + if (roinormalization[roinormalization.size() - 1].x + 2 * roiwidth < image_width) + behind.x = roinormalization[roinormalization.size() - 1].x + roiwidth; + else + behind.x = image_width - roiwidth; + behind.y = 2 * roinormalization[roinormalization.size() - 1].y - roinormalization[roinormalization.size() - 2].y; + if (behind.y < 0) + behind.y = 0; + if (behind.y + roiheight>image_height) + behind.y = image_height - roiheight; + behind.width = roiwidth; + behind.height = roiheight; + if (behind.x - roinormalization[roinormalization.size() - 1].x>0.6*roiwidth) + com.push_back(behind); + } + else//һ͵ڶĿ + { + int fff = roinormalization[roinormalization.size() - 1].x - roinormalization[roinormalization.size() - 2].x;//Ϊfff + if (roinormalization[roinormalization.size() - 1].x + roiwidth + fff < image_width)//δԽ + { + 
behind.x = roinormalization[roinormalization.size() - 1].x + fff; + behind.y = 2 * roinormalization[roinormalization.size() - 1].y - roinormalization[roinormalization.size() - 2].y; + if (behind.y < 0) + behind.y = 0; + if (behind.y + roiheight>image_height) + behind.y = image_height - roiheight; + behind.width = roiwidth; + behind.height = roiheight; + com.push_back(behind); + } + else //Խ + { + behind.width = roiwidth; + behind.height = roiheight; + behind.y = 2 * roinormalization[roinormalization.size() - 1].y - roinormalization[roinormalization.size() - 2].y; + if (behind.y < 0) + behind.y = 0; + if (behind.y + roiheight>image_height) + behind.y = image_height - roiheight; + behind.x = image_width - roiwidth; + com.push_back(behind); + } + } + + } + if (com.size() == 5 || com.size() == 4) + { + int size = com.size(); + CvRect front, front2;//front2frontǰ棨бҪӵĻ + CvRect behind, behind2;//behind2behind棨бҪӵĻ + if (roinormalization[1].x - roinormalization[0].x > roiwidth)//ǰһ͵ڶ + { + if (roinormalization[0].x - 0.8*roiwidth > 0) + front.x = roinormalization[0].x - 0.8*roiwidth; + else + front.x = 0; + front.y = 2 * roinormalization[0].y - roinormalization[1].y; + if (front.y < 0) + front.y = 0; + if (front.y + roiheight>image_height) + front.y = image_height - roiheight; + front.width = roiwidth; + front.height = roiheight; + if (roinormalization[0].x - front.x > 0.7*roiwidth) + { + com.insert(com.begin(), front);//ӵһ ۲Ƿӵڶ + front2.x = front.x - 0.8*roiwidth; + front2.y = 2 * front.y - roinormalization[0].y; + front2.width = roiwidth; + front2.height = roiheight; + if (front2.x >= 0 && front2.y >= 0 && front2.y + roiheight< image_height) + com.insert(com.begin(), front2); + + } + } + else // + { + /*int ddd = roinormalization[1].x - roinormalization[0].x; + if (ddd > 0.8*roiwidth) + ddd = 0.8*roiwidth;*/ + if (roinormalization[0].x - 1.1*roiwidth > 0) + front.x = roinormalization[0].x - 1.1*roiwidth; + else + front.x = 0; + front.y = 2 * roinormalization[0].y - 
roinormalization[1].y; + if (front.y < 0) + front.y = 0; + if (front.y + roiheight>image_height) + front.y = image_height - roiheight; + front.width = roiwidth; + front.height = roiheight; + if (roinormalization[0].x - front.x > 0.7*roiwidth) + { + com.insert(com.begin(), front); + front2.x = front.x - 0.8*roiwidth; + front2.y = 2 * front.y - roinormalization[0].y; + front2.width = roiwidth; + front2.height = roiheight; + if (front2.x >= 0 && front2.y >= 0 && front2.y + roiheight< image_height) + com.insert(com.begin(), front2); + } + } + if (roinormalization[roinormalization.size() - 1].x - roinormalization[roinormalization.size() - 2].x > roiwidth)//һ͵ڶĿ + { + if (roinormalization[roinormalization.size() - 1].x + 2 * roiwidth < image_width) + behind.x = roinormalization[roinormalization.size() - 1].x + roiwidth; + else + behind.x = image_width - roiwidth; + behind.y = 2 * roinormalization[roinormalization.size() - 1].y - roinormalization[roinormalization.size() - 2].y; + if (behind.y < 0) + behind.y = 0; + if (behind.y + roiheight > image_height) + behind.y = image_height - roiheight; + behind.width = roiwidth; + behind.height = roiheight; + if (behind.x - roinormalization[roinormalization.size() - 1].x > 0.95*roiwidth) { + com.push_back(behind); + behind2.x = behind.x + roiwidth; + behind2.y = 2 * behind.y - roinormalization[roinormalization.size() - 1].y; + behind2.width = roiwidth; + behind2.height = roiheight; + if (behind2.x + roiwidth= 0 && behind2.y + roiheight < image_height) + com.push_back(behind2); + + } + } + else//һ͵ڶĿ + { + int fff = roinormalization[roinormalization.size() - 1].x - roinormalization[roinormalization.size() - 2].x;//Ϊfff + if (roinormalization[roinormalization.size() - 1].x + roiwidth + fff < image_width)//δԽ + { + behind.x = roinormalization[roinormalization.size() - 1].x + fff; + behind.y = 2 * roinormalization[roinormalization.size() - 1].y - roinormalization[roinormalization.size() - 2].y; + if (behind.y < 0) + behind.y = 0; + if 
(behind.y + roiheight>image_height) + behind.y = image_height - roiheight; + behind.width = roiwidth; + behind.height = roiheight; + com.push_back(behind); + behind2.x = behind.x + fff; + behind2.y = 2 * behind.y - roinormalization[roinormalization.size() - 1].y; + behind2.width = roiwidth; + behind2.height = roiheight; + if (behind2.x + roiwidth= 0 && behind2.y + roiheight < image_height) + com.push_back(behind2); + } + else //Խ + { + behind.width = roiwidth; + behind.height = roiheight; + behind.y = 2 * roinormalization[roinormalization.size() - 1].y - roinormalization[roinormalization.size() - 2].y; + if (behind.y < 0) + behind.y = 0; + if (behind.y + roiheight>image_height) + behind.y = image_height - roiheight; + behind.x = image_width - roiwidth; + if (behind.x - roinormalization[roinormalization.size() - 1].x > 0.7*roiwidth) + com.push_back(behind); + } + } + + } + + return com; + } + else + return roinormalization; + + +} + +vector roicomplete2(vectorroinormalization, Mat normalization_detect_obj)//ʹһ취ȫ +{ + int image_width = normalization_detect_obj.cols; + int image_height = normalization_detect_obj.rows; + if (roinormalization.size() > 1) + { + int roiwidth = roinormalization[0].width; + int roiheight = roinormalization[0].height; + vector com;//comΪСʱĴ洢 + for (int i = 0; i < roinormalization.size(); i++) + { + com.push_back(roinormalization[i]); + } + for (int i = 1; i < roinormalization.size(); i++)//м + { + int distance = roinormalization[i].x - roinormalization[i - 1].x - roiwidth; + if (distance>0.4*roiwidth) + { + CvRect buchong; + buchong.x = (roinormalization[i].x + roinormalization[i - 1].x) / 2; + buchong.y = (roinormalization[i].y + roinormalization[i - 1].y) / 2; + buchong.width = roiwidth; + buchong.height = roiheight; + com.push_back(buchong); + + } + else if (i > 2 && distance > 0.1*roiwidth)//ļ϶СһЩ + { + CvRect buchong2; + buchong2.x = (roinormalization[i].x + roinormalization[i - 1].x) / 2; + buchong2.y = (roinormalization[i].y + 
roinormalization[i - 1].y) / 2; + buchong2.width = roiwidth; + buchong2.height = roiheight; + com.push_back(buchong2); + } + } + //if (com.size() == 5) + //{ + // CvRect add1; + // CvRect add2; + // int carplatepoint = -1;//жϳƵλ + // for (int i = 1; i < roinormalization.size(); i++) + // { + // int pointnumber = 0; + + // } + + //} + //if (com.size() == 5 || com.size() == 6)//ǰ + //{ + // CvRect front; + // CvRect behind; + // if (roinormalization[1].x - roinormalization[0].x > roiwidth) + // { + // if (roinormalization[0].x - 0.8*roiwidth > 0) + // front.x = roinormalization[0].x - 0.8*roiwidth; + // else + // front.x = 0; + // front.y = 2 * roinormalization[0].y - roinormalization[1].y; + // if (front.y < 0) + // front.y = 0; + // if (front.y + roiheight>image_height) + // front.y = image_height - roiheight; + // front.width = roiwidth; + // front.height = roiheight; + // if (roinormalization[0].x - front.x>0.7*roiwidth) + // com.insert(com.begin(), front); + // } + // else + // { + // int ddd = roinormalization[1].x - roinormalization[0].x; + // if (ddd > 0.8*roiwidth) + // ddd = 0.8*roiwidth; + // if (roinormalization[0].x - ddd > 0) + // front.x = roinormalization[0].x - ddd; + // else + // front.x = 0; + // front.y = 2 * roinormalization[0].y - roinormalization[1].y; + // if (front.y < 0) + // front.y = 0; + // if (front.y + roiheight>image_height) + // front.y = image_height - roiheight; + // front.width = roiwidth; + // front.height = roiheight; + // if (roinormalization[0].x - front.x>0.8*ddd) + // com.insert(com.begin(), front); + // } + // if (roinormalization[roinormalization.size() - 1].x - roinormalization[roinormalization.size() - 2].x > roiwidth)//һ͵ڶĿ + // { + // if (roinormalization[roinormalization.size() - 1].x + 2 * roiwidth < image_width) + // behind.x = roinormalization[roinormalization.size() - 1].x + roiwidth; + // else + // behind.x = image_width - roiwidth; + // behind.y = 2 * roinormalization[roinormalization.size() - 1].y - 
roinormalization[roinormalization.size() - 2].y; + // if (behind.y < 0) + // behind.y = 0; + // if (behind.y + roiheight>image_height) + // behind.y = image_height - roiheight; + // behind.width = roiwidth; + // behind.height = roiheight; + // if (behind.x - roinormalization[roinormalization.size() - 1].x>0.95*roiwidth) + // com.push_back(behind); + // } + // else//һ͵ڶĿ + // { + // int fff = roinormalization[roinormalization.size() - 1].x - roinormalization[roinormalization.size() - 2].x;//Ϊfff + // if (roinormalization[roinormalization.size() - 1].x + roiwidth + fff < image_width)//δԽ + // { + // behind.x = roinormalization[roinormalization.size() - 1].x + fff; + // behind.y = 2 * roinormalization[roinormalization.size() - 1].y - roinormalization[roinormalization.size() - 2].y; + // if (behind.y < 0) + // behind.y = 0; + // if (behind.y + roiheight>image_height) + // behind.y = image_height - roiheight; + // behind.width = roiwidth; + // behind.height = roiheight; + // com.push_back(behind); + // } + // else if (com.size() == 6)//Խ + // { + // behind.width = roiwidth; + // behind.height = roiheight; + // behind.y = 2 * roinormalization[roinormalization.size() - 1].y - roinormalization[roinormalization.size() - 2].y; + // if (behind.y < 0) + // behind.y = 0; + // if (behind.y + roiheight>image_height) + // behind.y = image_height - roiheight; + // behind.x = image_width - roiwidth; + // com.push_back(behind); + // } + // } + + //} + return com; + } + else + return roinormalization; + + +} + +vectorbuchong(vectorROI_choose, Mat normalization_detect_obj) +{ + vector qqq; + int image_width = normalization_detect_obj.cols; + int image_height = normalization_detect_obj.rows; + int srcsize = ROI_choose.size(); + for (int i = 1; i < srcsize; i++) { + qqq.push_back(ROI_choose[i]); + } + for (int i = 1; i < srcsize; i++) + { + CvRect left; + CvRect right; + CvRect up; + CvRect down; + left.x = ROI_choose[i].x - 4; + left.y = ROI_choose[i].y; + left.width = 
ROI_choose[i].width; + left.height = ROI_choose[i].height; + if (left.x>0) + qqq.push_back(left); + // printf("x=%d,y=%d", left.x, left.y); + + + right.x = ROI_choose[i].x + 4; + right.y = ROI_choose[i].y; + right.width = ROI_choose[i].width; + right.height = ROI_choose[i].height; + if (right.x + right.width 0) + qqq.push_back(up); + + down.x = ROI_choose[i].x; + down.y = ROI_choose[i].y + 4; + down.width = ROI_choose[i].width; + down.height = ROI_choose[i].height; + if (down.y + down.height < image_height) + qqq.push_back(down); + } + return qqq; + +} + +vectorhanzibuchong(vectorROI_choose, Mat normalization_detect_obj) { + vector qqq; + int image_width = normalization_detect_obj.cols; + int image_height = normalization_detect_obj.rows; + + qqq.push_back(ROI_choose[0]); + + for (int i = 0; i < 1; i++) + { + CvRect left; + CvRect right; + CvRect up; + CvRect down; + left.x = ROI_choose[i].x - 4; + left.y = ROI_choose[i].y; + left.width = ROI_choose[i].width; + left.height = ROI_choose[i].height; + if (left.x>0) + qqq.push_back(left); + // printf("x=%d,y=%d", left.x, left.y); + + + right.x = ROI_choose[i].x + 4; + right.y = ROI_choose[i].y; + right.width = ROI_choose[i].width; + right.height = ROI_choose[i].height; + if (right.x + right.width 0) + qqq.push_back(up); + + down.x = ROI_choose[i].x; + down.y = ROI_choose[i].y + 4; + down.width = ROI_choose[i].width; + down.height = ROI_choose[i].height; + if (down.y + down.height < image_height) + qqq.push_back(down); + } + return qqq; + +} + +char* outputplate(int predict) { + switch (predict) + { + case 0: + //printf("0"); + return "0"; + break; + case 1: + //printf("1"); + return "1"; + break; + case 2: + //printf("2"); + return "2"; + break; + case 3: + //printf("3"); + return "3"; + break; + case 4: + //printf("4"); + return "4"; + break; + case 5: + //printf("5"); + return "5"; + break; + case 6: + //printf("6"); + return "6"; + break; + case 7: + //printf("7"); + return "7"; + break; + case 8: + //printf("8"); + 
return "8"; + break; + case 9: + //printf("9"); + return "9"; + break; + case 10: + //printf("A"); + return "A"; + break; + case 11: + //printf("B"); + return "B"; + break; + case 12: + //printf("C"); + return "C"; + break; + case 13: + //printf("D"); + return "D"; + break; + case 14: + //printf("E"); + return "E"; + break; + case 15: + //printf("F"); + return "F"; + break; + case 16: + //printf("G"); + return "G"; + break; + case 17: + //printf("H"); + return "H"; + break; + case 18: + //printf("J"); + return "J"; + break; + case 19: + //printf("K"); + return "K"; + break; + case 20: + //printf("L"); + return "L"; + break; + case 21: + //printf("M"); + return "M"; + break; + case 22: + //printf("N"); + return "N"; + break; + case 23: + //printf("P"); + return "P"; + break; + case 24: + //printf("Q"); + return "Q"; + break; + case 25: + //printf("R"); + return "R"; + break; + case 26: + //printf("S"); + return "S"; + break; + case 27: + //printf("T"); + return "T"; + break; + case 28: + //printf("U"); + return "U"; + break; + case 29: + //printf("V"); + return "V"; + break; + case 30: + // printf("W"); + return "W"; + break; + case 31: + // printf("X"); + return "X"; + break; + case 32: + // printf("Y"); + return "Y"; + break; + case 33: + // printf("Z"); + return "Z"; + break; + case 34: + // printf("ѧ"); + return "ѧ"; + break; + case 35: + // printf(""); + return ""; + break; + case 36: + // printf(""); + return ""; + break; + case -1: + // printf("?"); + return "?"; + break; + + } + // printf(" "); + +} + +char* outputhanzi(int predict) { + switch (predict) + { + case 0: + //printf(""); + return ""; + break; + case 1: + //printf(""); + return ""; + break; + case 2: + //printf(""); + return ""; + break; + case 3: + //printf(""); + return ""; + break; + case 4: + //printf(""); + return ""; + break; + case 5: + //printf(""); + return ""; + break; + case 6: + //printf(""); + return ""; + break; + case 7: + //printf(""); + return ""; + break; + case 8: + 
//printf(""); + return ""; + break; + case 9: + //printf(""); + return ""; + break; + case 10: + //printf(""); + return ""; + break; + case 11: + //printf(""); + return ""; + break; + case 12: + //printf(""); + return ""; + break; + case 13: + //printf(""); + return ""; + break; + case 14: + //printf(""); + return ""; + break; + case 15: + //printf("³"); + return "³"; + break; + case 16: + //printf(""); + return ""; + break; + case 17: + //printf(""); + return ""; + break; + case 18: + //printf(""); + return ""; + break; + case 19: + //printf(""); + return ""; + break; + case 20: + //printf(""); + return ""; + break; + case 21: + //printf(""); + return ""; + break; + case 22: + //printf(""); + return ""; + break; + case 23: + //printf(""); + return ""; + break; + case 24: + //printf(""); + return ""; + break; + case 25: + //printf(""); + return ""; + break; + case 26: + //printf(""); + return ""; + break; + case 27: + //printf("ԥ"); + return "ԥ"; + break; + case 28: + //printf(""); + return ""; + break; + case 29: + //printf(""); + return ""; + break; + case 30: + //printf(""); + return ""; + break; + case -1: + //printf("?"); + return "?"; + break; + + } + //printf(" "); +} + +//BOOL sort_by_x(cv::Point2i point1, cv::Point2i point2) { +// return (point1.x < point2.x); +//} + +void show_choose_step(cv::Mat src, vector ROI_choose, char* windName) { + if (showSteps) + { + cv::Mat shaixuan_obj; + src.copyTo(shaixuan_obj); + + vector color; + cv::Scalar magenta = cv::Scalar(255, 0, 255); + magenta = cv::Scalar(255, 0, 0);// //Draw rectangle around the face + color.push_back(magenta); + + magenta = cv::Scalar(0, 255, 0);// //Draw rectangle around the face + color.push_back(magenta); + + magenta = cv::Scalar(0, 0, 255);// //Draw rectangle around the face + color.push_back(magenta); + + magenta = cv::Scalar(255, 255, 0);// //Draw rectangle around the face + color.push_back(magenta); + + magenta = cv::Scalar(255, 0, 255);// //Draw rectangle around the face + 
color.push_back(magenta); + + magenta = cv::Scalar(0, 0, 0);// //Draw rectangle around the face + color.push_back(magenta); + + magenta = cv::Scalar(0, 255, 255);// //Draw rectangle around the face + color.push_back(magenta); + + magenta = cv::Scalar(100, 13, 200);// //Draw rectangle around the face + color.push_back(magenta); + + color.push_back(magenta); + color.push_back(magenta); + color.push_back(magenta); + color.push_back(magenta); + + for (unsigned int j = 0; j < ROI_choose.size(); j++) + { + const cv::Rect& single_char_roi = ROI_choose[j]; + printf("x=%d y=%d w=%d h=%d i=%d \n", single_char_roi.x, single_char_roi.y, single_char_roi.width, single_char_roi.height, j); + cv::Point tl(single_char_roi.x, single_char_roi.y);//Get top-left and bottom-right corner points + cv::Point br = tl + cv::Point(single_char_roi.width, single_char_roi.height); + cv::rectangle(shaixuan_obj, tl, br, color[j], 1, 1, 0); + //detectROI0.push_back(single_char_roi); + } + + namedWindow(windName, 0); + imshow(windName, shaixuan_obj); + //cvWaitKey(); + } +} + +void RandomizeIdx(int *idx, int g_cCountTrainingSample) +{ + int i, j; + + srand((unsigned)time(0)); + + for (i = 0; i point; + int label = 0; + cv::Point2i centor = cv::Point2i(0, 0); + int avg_width = 0; + int avg_height = 0; +}result_init; + +typedef std::pair Prediction; +static bool PairCompare(const std::pair& lhs, + const std::pair& rhs) { + return lhs.first > rhs.first; +} + +/* Return the indices of the top N values of vector v. 
*/ +static std::vector Argmax(const std::vector& v, int N) { + std::vector > pairs; + for (size_t i = 0; i < v.size(); ++i) + pairs.push_back(std::make_pair(v[i], static_cast(i))); + std::partial_sort(pairs.begin(), pairs.begin() + N, pairs.end(), PairCompare); + + std::vector result; + for (int i = 0; i < N; ++i) + result.push_back(pairs[i].second); + return result; +} + +BOOL sort_by_centor(result_ result1, result_ result2) { + return (result1.centor.x < result2.centor.x); +} +/* Return the top N predictions. */ + +/* Load the mean file in binaryproto format. */ +BOOL sort_by_x2(cv::Point2i point1, cv::Point2i point2) { + return (point1.x < point2.x); +} + +bool sort_by_x(CvRect obj1, CvRect obj2) +{ + return obj1.x < obj2.x; +} + +vector split(const string &s, const string &seperator) { + vector result; + typedef string::size_type string_size; + string_size i = 0; + + while (i != s.size()) { + //ҵַ׸ڷָĸ + int flag = 0; + while (i != s.size() && flag == 0) { + flag = 1; + for (string_size x = 0; x < seperator.size(); ++x) + if (s[i] == seperator[x]) { + ++i; + flag = 0; + break; + } + } + + //ҵһַָָ֮ȡ + flag = 0; + string_size j = i; + while (j != s.size() && flag == 0) { + for (string_size x = 0; x < seperator.size(); ++x) + if (s[j] == seperator[x]) { + flag = 1; + break; + } + if (flag == 0) + ++j; + } + if (i != j) { + result.push_back(s.substr(i, j - i)); + i = j; + } + } + return result; +} + +int outputplate(string predict) { + if (predict == "0") return 0; + else if (predict == "1") return 1; + else if (predict == "2") return 2; + else if (predict == "3") return 3; + else if (predict == "4") return 4; + else if (predict == "5") return 5; + else if (predict == "6") return 6; + else if (predict == "7") return 7; + else if (predict == "8") return 8; + else if (predict == "9") return 9; + else if (predict == "A") return 10; + else if (predict == "B") return 11; + else if (predict == "C") return 12; + else if (predict == "D") return 13; + else if (predict == 
"E") return 14; + else if (predict == "F") return 15; + else if (predict == "G") return 16; + else if (predict == "H") return 17; + else if (predict == "J") return 18; + else if (predict == "K") return 19; + else if (predict == "L") return 20; + else if (predict == "M") return 21; + else if (predict == "N") return 22; + else if (predict == "P") return 23; + else if (predict == "Q") return 24; + else if (predict == "R") return 25; + else if (predict == "S") return 26; + else if (predict == "T") return 27; + else if (predict == "U") return 28; + else if (predict == "V") return 29; + else if (predict == "W") return 30; + else if (predict == "X") return 31; + else if (predict == "Y") return 32; + else if (predict == "Z") return 33; + else if (predict == "ѧ") return 34; + else if (predict == "") return 35; + else if (predict == "") return 36; + else return -1; +} + +int outputhanzi(string predict) { + if (predict == "") return 0; + else if (predict == "") return 1; + else if (predict == "") return 2; + else if (predict == "") return 3; + else if (predict == "") return 4; + else if (predict == "") return 5; + else if (predict == "") return 6; + else if (predict == "") return 7; + else if (predict == "") return 8; + else if (predict == "") return 9; + else if (predict == "") return 10; + else if (predict == "") return 11; + else if (predict == "") return 12; + else if (predict == "") return 13; + else if (predict == "") return 14; + else if (predict == "³") return 15; + else if (predict == "") return 16; + else if (predict == "") return 17; + else if (predict == "") return 18; + else if (predict == "") return 19; + else if (predict == "") return 20; + else if (predict == "") return 21; + else if (predict == "") return 22; + else if (predict == "") return 23; + else if (predict == "") return 24; + else if (predict == "") return 25; + else if (predict == "") return 26; + else if (predict == "ԥ") return 27; + else if (predict == "") return 28; + else if (predict == "") 
return 29; + else if (predict == "") return 30; + else return -1; +} + +void detectAndDisplay(Mat frame) +{ + std::vector faces; + Mat frame_gray; + + cvtColor(frame, frame_gray, COLOR_BGR2GRAY); + equalizeHist(frame_gray, frame_gray); + + //-- Detect faces + /*imageΪĻҶͼobjectsΪõľο飬 + scaleFactorΪÿһͼ߶еij߶ȲĬֵΪ1.1 + minNeighborsΪÿһӦñڽĬΪ3 + flagsµķûãĿǰhaarǾɰģ + CV_HAAR_DO_CANNY_PRUNINGCannyԵųһЩԵٻߺܶͼ + CV_HAAR_SCALE_IMAGEǰ⣬ + CV_HAAR_FIND_BIGGEST_OBJECTֻ壬 + CV_HAAR_DO_ROUGH_SEARCHֻԼ⣩ĬΪ0. + minSizemaxSizeƵõĿķΧ*/ + + car_cascade.detectMultiScale(frame_gray, faces, 1.15, 2, 0 | CV_HAAR_SCALE_IMAGE, Size(20, 20), Size(170, 170)); // 2. ???? + + + for (size_t i = 0; i < faces.size(); i++) + { + Point center(faces[i].x + faces[i].width*0.5, faces[i].y + faces[i].height*0.5); + ellipse(frame, center, Size(faces[i].width*0.5, faces[i].height*0.5), 0, 0, 360, Scalar(255, 0, 255), 1, 8, 0); + } + //-- Show what you got + imshow("cascadetest", frame); +} + +class Timer { + using Clock = std::chrono::high_resolution_clock; +public: + /*! \brief start or restart timer */ + inline void Tic() { + start_ = Clock::now(); + } + /*! \brief stop timer */ + inline void Toc() { + end_ = Clock::now(); + } + /*! 
\brief return time in ms */ + inline double Elasped() { + auto duration = std::chrono::duration_cast(end_ - start_); + return duration.count(); + } + +private: + Clock::time_point start_, end_; +}; + +//bool rec_char(caffe::Net& net, cv::Mat src, int& predict, double& loss) { +// auto input = net.blob_by_name("data"); +// input->Reshape({ 1, 1, 35, 21 });//{64, 100, 1, 1} +// float *data = input->mutable_cpu_data();//ʶ +// const int n = input->count(); +// //cv::Mat src = cv::imread("img/char4.jpg", 0); +// +// cv::Mat src2; +// src.convertTo(src2, CV_32F); +// cv::resize(src2, src2, cv::Size(21, 35)); +// for (int i = 0; i < n; ++i) { +// data[i] = src2.at(i / src2.cols, i%src2.cols) / 256; /* nd(gen);*///תͼ +// } +// // forward +// /*Timer timer; +// timer.Tic();*/ +// net.Forward(); +//#ifndef US_CPP +// off_netiof();//close net.txt +//#endif +// //timer.Toc(); +// // visualization +// auto images = net.blob_by_name("prob");//gconv5 +// /*std::cout << net.blob_by_name("prob")->shape_string() << std::endl;*/ +// const int num = images->num(); +// const int channels = images->channels(); +// const int height = images->height(); +// const int width = images->width(); +// const int canvas_len = std::ceil(std::sqrt(num)); +// for (int i = 0; i < channels; i++) { +// if (i == 0) { +// loss = images->mutable_cpu_data()[i];//ȡLoss +// predict = 0; +// } +// else { +// if (images->mutable_cpu_data()[i]>loss) { +// loss = images->mutable_cpu_data()[i]; +// predict = i; //óʶ +// } +// } +// /*std::cout << images->mutable_cpu_data()[i] << std::endl;*/ +// } +// return true; +//} + + + + + diff --git a/ocr_test/config.txt b/ocr_test/config.txt new file mode 100644 index 0000000000000000000000000000000000000000..99c9b3cd9b3662f87fa217c62cb46f7c628acd3c --- /dev/null +++ b/ocr_test/config.txt @@ -0,0 +1,6 @@ +path = D:\ocr\plate_card_BLSTM\testData\plate_test_pic\chepai +startNum = 0 + +path = C:\plate_card_BLSTM\testData\test +startNum = 0 + diff --git 
a/ocr_test/levenshtein.cpp b/ocr_test/levenshtein.cpp new file mode 100644 index 0000000000000000000000000000000000000000..705306d2a2ae7eecc06d60e97dd9a6ba9cd13090 --- /dev/null +++ b/ocr_test/levenshtein.cpp @@ -0,0 +1,155 @@ +#include +#include +#include "levenshtein.h" + +static int minimum(int a,int b,int c) +/*Gets the minimum of three values*/ +{ + int min=a; + if(b offset) && (l2-1 > offset) && (s1[l1-1] == s2[l2-1])) { + l1--; + l2--; + } + + l1 -= offset; + l2 -= offset; + + /* The Levenshtein algorithm itself. */ + + /* s1= */ + /* ERIK */ + /* */ + /* 01234 */ + /* s2=V 11234 */ + /* E 21234 */ + /* E 32234 */ + /* N 43334 <- prev_row */ + /* S 54444 <- curr_row */ + /* T 65555 */ + /* R 76566 */ + /* A 87667 */ + + /* Allocate memory for both rows */ + + prev_row = (int*)malloc(l1+1); + curr_row = (int*)malloc(l1+1); + + if ((prev_row == NULL) || (curr_row == NULL)) { + return -1; + } + + /* Initialize the current row. */ + + for (col=0; col<=l1; col++) { + curr_row[col] = col; + } + + for (row=1; row<=l2; row++) { + /* Copy the current row to the previous row. */ + + memcpy(prev_row, curr_row, sizeof(int)*(l1+1)); + + /* Calculate the values of the current row. */ + + curr_row[0] = row; + curr_row_min = row; + + for (col=1; col<=l1; col++) { + /* Equal (cost=0) or substitution (cost=1). */ + + curr_row[col] = prev_row[col-1] + ((s1[offset+col-1] == s2[offset+row-1]) ? 0 : 1); + + /* Insertion if it's cheaper than substitution. */ + + if (prev_row[col]+1 < curr_row[col]) { + curr_row[col] = prev_row[col]+1; + } + + /* Deletion if it's cheaper than substitution. */ + + if (curr_row[col-1]+1 < curr_row[col]) { + curr_row[col] = curr_row[col-1]+1; + } + + /* Keep track of the minimum value on this row. */ + + if (curr_row[col] < curr_row_min) { + curr_row_min = curr_row[col]; + } + } + + /* Return nil as soon as we exceed the threshold. 
*/ + + if (threshold > -1 && curr_row_min >= threshold) { + free(prev_row); + free(curr_row); + + return -1; + } + } + + /* The result is the last value on the last row. */ + + result = curr_row[l1]; + + free(prev_row); + free(curr_row); + + return result; +} \ No newline at end of file diff --git a/ocr_test/levenshtein.h b/ocr_test/levenshtein.h new file mode 100644 index 0000000000000000000000000000000000000000..79a531531eec3439bb5f11fd5a67f60c243ec14f --- /dev/null +++ b/ocr_test/levenshtein.h @@ -0,0 +1,2 @@ +int levenshtein(char * s1, int l1, char * s2, int l2, int threshold); +int levenshtein_distance(char *s, int n, char*t, int m, int noop); \ No newline at end of file diff --git a/ocr_test/ocr_test.cpp b/ocr_test/ocr_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..f44da25135196aa5ab56c7a498feb25e12530c3d --- /dev/null +++ b/ocr_test/ocr_test.cpp @@ -0,0 +1,2188 @@ +// ocr_test.cpp : ̨Ӧóڵ㡣 +// + +#include "stdafx.h" + +#include +#include +#include "public.h" + +#include "ICNNPredict.h" + + +//#define CPU_ONLY + +#ifndef CPU_ONLY +#ifdef _DEBUG +#pragma comment(lib,"libClassificationd.lib") +#else +#pragma comment(lib,"libClassification.lib") +#endif + +#else + +#ifdef _DEBUG +#pragma comment(lib,"libClassificationCPU-MKLd.lib") +#else +#pragma comment(lib,"libClassificationCPU-MKL.lib") +#endif +#endif + +#include "bktree.h" +#include "levenshtein.h" + +#include +#include +#include +#include +#include +#include +using namespace std; + +#include + + +int GetUppercaseNum(const string& str) +{ + int n = 0; + for (size_t i = 0; i < str.size(); i++) + { + if (str[i] >= 'A' && str[i] <= 'Z') + n++; + } + return n; +} + + +std::wstring string2wstring(const string& str, bool bSrcIsUTF8 = true) +{ +#ifdef _WIN32 + UINT srcCode = bSrcIsUTF8 ? 
CP_UTF8 : CP_ACP; + int len = ::MultiByteToWideChar(srcCode, + 0, + str.c_str(), + -1, + NULL, + 0); + if (len == 0) + return wstring(); + + WCHAR* dst = new WCHAR[len]; + int nRet = ::MultiByteToWideChar(srcCode, + 0, + str.c_str(), + -1, + dst, + len); +#else + //printf("=====str====%s,len=%lu\n", str.c_str(), str.size()); + wstring wstr = convert_mb2wc("utf-8", "ucs-2", str); + // if (wstr.size() == 0) + // wstr = convert_mb2wc("gb2312", "ucs-2", str); + // if(wstr.size()==0) + // wstr = convert_mb2wc("ascii", "ucs-2", str); + +#endif + + wstring wstr = dst; + delete[]dst; + + + return wstr; +} + + + +void FindAllImages(const char *folder, std::vector& vImgPaths, bool bSubFolder) +{ + +#ifdef WIN32 + char szPathName[MAX_PATH]; + strcpy_s(szPathName, folder); + if (szPathName[strlen(szPathName) - 1] != '\\') + strcat_s(szPathName, "\\"); + + char szFileName[256]; + strcpy_s(szFileName, szPathName); + strcat_s(szFileName, "*.*"); + + int ret = 0; + WIN32_FIND_DATA wfd; + HANDLE hFind = FindFirstFile(szFileName, &wfd); + if (hFind != INVALID_HANDLE_VALUE) + { + do + { + if (strcmp(wfd.cFileName, ".") == 0 || strcmp(wfd.cFileName, "..") == 0) + continue; + + if (wfd.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) + { + if (bSubFolder) + { + strcpy_s(szFileName, szPathName); + strcat_s(szFileName, wfd.cFileName); + FindAllImages(szFileName, vImgPaths, bSubFolder); + } + } + else + { + if (strlen(wfd.cFileName) >= 5) + { + char *ext3 = wfd.cFileName + strlen(wfd.cFileName) - 3; + char *ext4 = ext3 - 1; + if (_stricmp(ext3, "bmp") == 0 + || _stricmp(ext3, "jpg") == 0 + || _stricmp(ext3, "JPG") == 0 + || _stricmp(ext4, "jpeg") == 0 + || _stricmp(ext4, "JPEG") == 0 + || _stricmp(ext3, "png") == 0 + || _stricmp(ext3, "gif") == 0) + { + //printf("%s\n", wfd.cFileName); + + char filename[256]; + sprintf_s(filename, "%s%s", szPathName, wfd.cFileName); + vImgPaths.push_back(filename); + } + } + } + } while (FindNextFile(hFind, &wfd) != 0); + } +#else + + DIR* pDir = NULL; + 
struct dirent* ent = NULL; + + pDir = opendir(folder); + if (pDir == 0) + { + printf("open folder(%s) FAIL\n", folder); + return; + } + + std::string strFolder = folder; + if (strFolder[strFolder.size() - 1] != '/') + strFolder += "/"; + + while (ent = readdir(pDir)) + { + if (ent->d_type & DT_DIR) + continue; + + int len = (int)strlen(ent->d_name); + if (len >= 5 && strcmp(ent->d_name + len - 4, ".jpg") == 0) + { + char filepath[256]; + sprintf(filepath, "%s%s", strFolder.c_str(), ent->d_name); + vImgPaths.push_back(filepath); + } + } + + closedir(pDir); + +#endif + +} + + +string int2str(int val) +{ + char buf[64] = ""; + _snprintf(buf, sizeof(buf) - 1, "%d", val); + return buf; +} + +int str2int(const string &val) +{ + return atoi(val.c_str()); +} + + +string GetPredictString(const vector& fm, int idxBlank, const vector& labels) +{ + string str; + for (size_t t = 0; t < fm.size(); t++) + { + int idx = t; + int label = (int)fm[idx] + 0.5f; + if (label >= 0 && label != idxBlank) + { + //str += ("*" + labels[label] + "(" + int2str(label) + ")*"); + str += labels[label]; + } + } + return str; +} + + +float GetCTCLoss(float*activations, int timesteps, int alphabet_size, int blank_index_, + const string& strlabel, const std::map& mapLabel2Idx) +{ + size_t workspace_alloc_bytes_; + + ctcOptions options; + options.loc = CTC_CPU; + options.num_threads = 8; + options.blank_label = blank_index_; + + int len = strlabel.size(); + ctcStatus_t status = CTC::get_workspace_size(&len, + ×teps, + alphabet_size, + 1, + options, + &workspace_alloc_bytes_); + //CHECK_EQ(status, CTC_STATUS_SUCCESS) << "CTC Error: " << ctcGetStatusString(status); + vector workspace_(workspace_alloc_bytes_); + + vector flat_labels; + for (size_t i = 0; i < strlabel.size(); i++) + { + map::const_iterator it = mapLabel2Idx.find(strlabel[i]); + if (it != mapLabel2Idx.end()) + flat_labels.push_back(it->second); + } + if (flat_labels.size() != strlabel.size()) + return 0; + float cost = 0; + status = 
CTC::compute_ctc_loss_cpu(activations, + 0, + flat_labels.data(), + &len, + ×teps, + alphabet_size, + 1, + &cost, + workspace_.data(), + options + ); + return cost; +} + +void test_ocr_english(const string& imgfolder, const string& modelfolder, const string& lexiconfile) +{ +#ifdef CPU_ONLY + bool usegpu = false; +#else + bool usegpu = true; +#endif + + //load model + ICNNPredict* pCNN = CreatePredictInstance(modelfolder.c_str(), usegpu); + int wstd = 0, hstd = 0; + pCNN->GetInputImageSize(wstd, hstd); + + //get alphabet + vector alphabets = pCNN->GetLabels(); + + int idxBlank = 0; + vector::const_iterator it = find(alphabets.begin(), alphabets.end(), "blank"); + if (it != alphabets.end()) + idxBlank = (int)(it - alphabets.begin()); + + + map mapLabel2IDs; + for (size_t i = 0; i < alphabets.size(); i++) + { + wchar_t c = 0; + if (alphabets[i] == "blank") + continue; + wstring wlabel = string2wstring(alphabets[i], true); + mapLabel2IDs.insert(make_pair(wlabel[0], i)); + } + + + //init BK-tree of lexicon + printf("init BK-tree of lexicon\n"); + BKTree* pBKtree = bktree_new(levenshtein_distance); + ifstream fslexicon(lexiconfile); + + int n = 0; + int caseoffset = 'A' - 'a'; + string line; + + while (getline(fslexicon, line)) + { + if (line.size() == 0) + continue; + //if(line[line.size()-1]=='\t') + bktree_add(pBKtree, const_cast(line.c_str()), line.size()); + n++; + if (GetUppercaseNum(line) == 0)//ȫСдģתɴдӣתĸд + { + line[0] += caseoffset; + bktree_add(pBKtree, const_cast(line.c_str()), line.size()); + n++; + if (line.size() > 1) + { + for (size_t i = 1; i < line.size(); i++) + { + if (line[i] >= 'a' && line[i] <= 'z') + line[i] += caseoffset; + } + bktree_add(pBKtree, const_cast(line.c_str()), line.size()); + n++; + } + } + printf("\r%d", n); + } + printf("\n"); + + + int sumspend = 0; + int nok_lexicon = 0; + int nok_nolexicon = 0; + + vector imgs; + FindAllImages(imgfolder.c_str(), imgs, false); + + + + for (size_t i=0;i shape; + vector pred = 
pCNN->GetOutputFeatureMap(img, shape); + + int end = clock(); + sumspend += (end - start); + + + string strpredict0 = GetPredictString(pred, idxBlank, alphabets); + + printf("[%d/%d]%s\n\torig result: %s\n",i+1,imgs.size(),imgs[i].c_str(), strpredict0.c_str()); + + string strpredict = strpredict0; + + + int dist = std::min(2, (int)strpredict0.size() / 3); + vector< BKResult> ress = bktree_query(pBKtree, const_cast(strpredict0.c_str()), strpredict0.size(), dist); + + float min_ctc_loss = 1000; + vector outshape; + vector activitas = pCNN->GetLayerFeatureMaps("fc1x", outshape);; + int timesteps = outshape[0]; + int min_ctc_idx = -1; + for (size_t j = 0; j < ress.size(); j++) + { + float ctcloss = GetCTCLoss(activitas.data(), timesteps, alphabets.size(), idxBlank, ress[j].str, mapLabel2IDs); +#ifdef _DEBUG + printf("%s, ctc loss=%f\n", ress[j].str.c_str(), ctcloss); +#endif + if (ctcloss < min_ctc_loss) + { + min_ctc_loss = ctcloss; + min_ctc_idx = (int)j; + } + } + + if (ress.size()>0 && min_ctc_idx >= 0) + printf("\tdic result: %s\n", ress[min_ctc_idx].str.c_str()); + + //printf("%d, mean spend=%.2f", i+1, sumspend / (float)(i+1)); + + } + + + bktree_destroy(pBKtree); + +} + + +void test_ocr_chinese(const string& imgfolder, const string& modelfolder) +{ +#ifdef CPU_ONLY + bool usegpu = false; +#else + bool usegpu = true; +#endif + + //load model + ICNNPredict* pCNN = CreatePredictInstance(modelfolder.c_str(), usegpu); + int wstd = 0, hstd = 0; + pCNN->GetInputImageSize(wstd, hstd); + + //get alphabet + vector alphabets = pCNN->GetLabels(); + + int idxBlank = 0; + vector::const_iterator it = find(alphabets.begin(), alphabets.end(), "blank"); + if (it != alphabets.end()) + idxBlank = (int)(it - alphabets.begin()); + + + map mapLabel2IDs; + for (size_t i = 0; i < alphabets.size(); i++) + { + wchar_t c = 0; + if (alphabets[i] == "blank") + continue; + wstring wlabel = string2wstring(alphabets[i], true); + mapLabel2IDs.insert(make_pair(wlabel[0], i)); + } + + + int 
sumspend = 0; + int nok_lexicon = 0; + int nok_nolexicon = 0; + + vector imgs; + FindAllImages(imgfolder.c_str(), imgs, false); + + for (size_t i = 0; i < imgs.size(); i++) + { + string imgfile = imgs[i]; + cv::Mat img = cv::imread(imgfile, CV_LOAD_IMAGE_COLOR); + int w = img.cols, h = img.rows; + if (2 * w <= h) + { + cv::transpose(img, img); + cv::flip(img, img, 1); + w = img.cols, h = img.rows; + } + + + //int w1 = hstd*w / h; + int w1 = 280; + if (w1 != w && h != hstd) + cv::resize(img, img, cv::Size(w1, hstd)); + + int start = clock(); + + vector shape; + vector pred = pCNN->GetOutputFeatureMap(img, shape); + + int end = clock(); + sumspend += (end - start); + + string strpredict0 = GetPredictString(pred, idxBlank, alphabets); + + printf("[%d/%d]%s: %s\n", i + 1, imgs.size(), imgs[i].c_str(), strpredict0.c_str()); + + cv::namedWindow("img",0); + cv::imshow("img", img); + cv::waitKey(); + + } +} + + +int rec_test_main() +{ +#if 0 + string imgfolder = "I:\\OCR_Line\\synth_english\\db_read_test\\"; + string modelfolder = "I:\\OCR_Line\\synth_english\\crnn\\crnn_256\\"; + string lexiconfile = "I:\\OCR_Line\\synth_english\\lexicon.txt"; + + test_ocr_english(imgfolder, modelfolder, lexiconfile); + +#else + //string imgfolder = "F:\\plate_card_rec\\data\\multilabel_fcn\\right_vision1\\"; + string imgfolder = "C:\\plate_card_BLSTM\\vs2013_caffe_BN_multi_label_kenel_w\\water_meter_caffe_old\\face\\"; + string modelfolder = ".\\plateCard_test\\"; + test_ocr_chinese(imgfolder, modelfolder); +#endif +} + + + +/////////////////////////////// +#include +#include "boost/make_shared.hpp" +#include "caffe-gpu.h" + +using namespace caffe; + + + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + + +#ifndef HAAR_CASCADE +#include "common.h" +#include "classifierCaffe.h" +#include "opencv2/opencv.hpp" + + +typedef std::pair Pred; +struct result_plate { + CvRect coordinate; + vector one_char; +}; + + 
+_declspec(dllexport) bool rec_char(Classifier &net, cv::Mat src, int& predict, double& loss) { + + vector predictions = net.Classify(src); + + /* Print the top N predictions. */ + /*for (size_t i = 0; i < predictions.size(); ++i) { + std::cout << predictions[i] << " "; + } + std::cout << std::endl << std::endl;*/ + + int N = 1; + std::vector maxN = Argmax(predictions, N); + for (int i = 0; i < N; ++i) { + predict = maxN[i]; + loss = predictions[predict]; + } + + + return true; +} + + +#endif // DEBUG + + + +#define showSteps 0 + +using namespace std; +char * configFile = "config.txt"; + + +char* trainSetPosPath = (char *)malloc(200 * sizeof(char)); +char* templateName = (char *)malloc(200 * sizeof(char)); +int frame_width = 640; +int frame_height = 480; +char *model_file = (char *)malloc(200 * sizeof(char)); +char *trained_file = (char *)malloc(200 * sizeof(char)); +int label_file = 256; + +void readConfig(char* configFile, char* trainSetPosPath) { + fstream f; + char cstring[1000]; + int readS = 0; + f.open(configFile, fstream::in); + char param1[200]; strcpy(param1, ""); + char param2[200]; strcpy(param2, ""); + char param3[200]; strcpy(param3, ""); + + //--ȡһУ-- + f.getline(cstring, sizeof(cstring)); + readS = sscanf(cstring, "%s %s %s", param1, param2, param3); + strcpy(trainSetPosPath, param3); + + //--ȡ2У-- Ա + f.getline(cstring, sizeof(cstring)); + readS = sscanf(cstring, "%s %s %s", param1, param2, param3); + strcpy(templateName, param3); + + //--ȡ3У-- + f.getline(cstring, sizeof(cstring)); + readS = sscanf(cstring, "%s %s %d", param1, param2, &frame_width); + + //--ȡ4У-- + f.getline(cstring, sizeof(cstring)); + readS = sscanf(cstring, "%s %s %d", param1, param2, &frame_height); + + //--ȡ5У-- ѵģ + f.getline(cstring, sizeof(cstring)); + readS = sscanf(cstring, "%s %s %s", param1, param2, param3); + strcpy(model_file, param3); + + //--ȡ6У-- ѵȨ + f.getline(cstring, sizeof(cstring)); + readS = sscanf(cstring, "%s %s %s", param1, param2, param3); + 
strcpy(trained_file, param3); + + //--ȡ6У-- + f.getline(cstring, sizeof(cstring)); + readS = sscanf(cstring, "%s %s %d", param1, param2, &label_file); +} + +//config.txtĸĿ¼µеļĿ¼-- +// Ŀ¼־labelĿ¼ļΪlabelڵѵ--- +vector imgNames; +vector imgLists; +vector imgLabels; +int labelTemp = 0; + +void dfsFolder(string folderPath) { + _finddata_t FileInfo; + string strfind = folderPath + "\\*"; + long long Handle = _findfirst(strfind.c_str(), &FileInfo); + if (Handle == -1L) + { + cerr << "can not match the folder path" << endl; + exit(-1); + } + do { + //жǷĿ¼-- + if (FileInfo.attrib & _A_SUBDIR) { + // cout<> str; + + string finalList = FileInfo.name; + imgLists.push_back(finalList); + + } + } while (_findnext(Handle, &FileInfo) == 0); + _findclose(Handle); + +} + +void initTrainImage() { + readConfig(configFile, trainSetPosPath); + + string folderPath = trainSetPosPath; + // string folderPath = "H:\\char\\poker_rec_char_equalist_test"; + dfsFolder(folderPath); +} + + +//////////////////////////////////////////// + + + + +int detect_test_main(int argc, char **argv) { + initTrainImage(); + + /*::google::InitGoogleLogging(argv[0]);*/ + double threshold[3] = { 0.7, 0.5, 0.3 }; + double factor = 0.709; + int minSize = 40; + std::string proto_model_dir = argv[1]; + MTCNN *detector = new MTCNN(proto_model_dir); + int imgNum = imgNames.size(); + for (int iNum = 0; iNum < imgNum; iNum++) { + + cout << endl << iNum << " " << imgNames[iNum].c_str() << endl; + cv::VideoCapture capture(imgNames[iNum].c_str()); + //cv::VideoCapture capture(0); + //capture.set(CV_CAP_PROP_FRAME_WIDTH, 1280); + //capture.set(CV_CAP_PROP_FRAME_HEIGHT, 720); + // + //VideoCapture capture("F:\\MTCNN-master\\vs2013_caffe_BN_multi_label\\water_meter_caffe_old\\\img\\1.avi"); + //Ƿ:ɹʱisOpenedture + if (!capture.isOpened()) + cout << "fail to open!" 
<< endl; + //ȡ֡ + // long totalFrameNumber = capture.get(CV_CAP_PROP_FRAME_COUNT); + // cout << "Ƶ" << totalFrameNumber << "֡" << endl; + // + // + //ÿʼ֡() + // long frameToStart = 300; + // capture.set(CV_CAP_PROP_POS_FRAMES, frameToStart); + // cout << "ӵ" << frameToStart << "֡ʼ" << endl; + // + // + //ý֡ + // int frameToStop = 400000; + // + // if (frameToStop < frameToStart) + // { + // cout << "֡Сڿʼ֡󣬼˳" << endl; + // return -1; + // } + // else + // { + // cout << "֡Ϊ" << frameToStop << "֡" << endl; + // } + // + // + // ȡ֡ + // double rate = capture.get(CV_CAP_PROP_FPS); + // cout << "֡Ϊ:" << rate << endl; + + + + //һƶȡƵѭı + bool stop = false; + + //ʾÿһ֡Ĵ + cv::namedWindow("Extracted frame_", 0); + //֡ļʱ: + //int delay = 1000/rate; + // int delay = 1000 / rate; + // if (rate == 0) delay = 1; + // + //whileѭȡ֡ + //currentFrameѭпƶȡָ֡ѭı + // long currentFrame = frameToStart; + // + // VideoWriter writer; + // writer.open("../result/SuicideSquad.mp4",CV_FOURCC('M', 'J', 'P', 'G'), 25, Size(1280,720), true); + //ÿһ֡ͼ + + cv::Mat image; + int frame_count = 0; + while (!stop) + { + //ȡһ֡ + if (!capture.read(image)) + { + cout << "ȡƵʧ" << endl; + stop = true; + continue; + } + + //imshow("Live", image); + //waitKey(0); + //for (int i = 0; i < 100; i++)capture.read(image); + // + //cv::flip(image, image,-1); + + std::vector faceInfo; + clock_t t1 = clock(); + // std::cout << "Detect " << image.rows << "X" << image.cols; + // + //image = image.t(); + + detector->Detect(image, faceInfo, minSize, threshold, factor); +#ifdef CPU_ONLY + std::cout << " Time Using CPU: " << (clock() - t1)*1.0 / 1000 << std::endl; +#else + std::cout << " Time Using GPU-CUDNN: " << (clock() - t1)*1.0 / 1000 << std::endl; +#endif + cv::Mat dst_face; + + for (int i = 0; i < faceInfo.size(); i++) { + + float x = faceInfo[i].bbox.x1; + float y = faceInfo[i].bbox.y1; + float w = faceInfo[i].bbox.x2 - faceInfo[i].bbox.x1 + 1; + float h = faceInfo[i].bbox.y2 - faceInfo[i].bbox.y1 + 1; + + if (x < 0) 
x = 0; if (y < 0) y = 0; + if ((y + h) > image.rows) h = image.rows - y; + if ((x + w) > image.cols) w = image.cols - x; + if (w < 0) continue; + if (h < 0) continue; + + std::cout << x << " " << y << " " << w << " " << h << std::endl; + std::cout << image.rows << " " << image.cols << std::endl; + + dst_face = image(cv::Rect(x, y, w, h)); + char dst_name[100]; + _mkdir("face"); + sprintf_s(dst_name, "%s%d%s%d%s%d%s", "face\\", iNum, "_", frame_count++, "_", i, ".jpg"); + cv::imwrite(dst_name, dst_face); + + + FacePts facePts = faceInfo[i].facePts; + + + + + + cv::Point2f srcTri[4]; + cv::Point2f dstTri[4]; + cv::Mat rot_mat(2, 4, CV_32FC1); + cv::Mat warp_mat(2, 4, CV_32FC1); + + for (int j = 0; j < 4; j++) { + srcTri[j] = cv::Point2f(facePts.x[j] - x, facePts.y[j] - y); + } + + int padding_x = cvFloor(h * 0.04 * 5); + int padding_y = cvFloor(h * 0.04 * 2); + int x0 = 0; int y0 = 0; + int x1 = 120; int y1 = 0; + int x2 = 120; int y2 = 48; + int x3 = 0; int y3 = 48; + + dstTri[0] = cv::Point2f(x0 + padding_x, y0 + padding_y); + dstTri[1] = cv::Point2f(x1 + padding_x, y1 + padding_y); + dstTri[2] = cv::Point2f(x2 + padding_x, y2 + padding_y); + dstTri[3] = cv::Point2f(x3 + padding_x, y3 + padding_y); + + warp_mat = cv::getAffineTransform(srcTri, dstTri); + cv::Mat warp_dstImage = cv::Mat::zeros(48 + 2 * padding_y, 120 + 2 * padding_x, dst_face.type()); + cv::warpAffine(dst_face, warp_dstImage, warp_mat, warp_dstImage.size()); + +#ifdef showSteps + cv::namedWindow("dst_face", 0); + cv::imshow("dst_face", dst_face); + + cv::namedWindow("warp_dstImage", 0); + cv::imshow("warp_dstImage", warp_dstImage); + + cv::waitKey(1); +#endif + + sprintf_s(dst_name, "%s%d%s%d%s%d%s", "face\\", iNum, "_", frame_count++, "_", i, "_warp_dstImage_.jpg"); + cv::imwrite(dst_name, warp_dstImage); + +#ifdef warpPerspective + + int img_height = dst_face.rows; + int img_width = dst_face.cols; + + vector corners(4); + + for (int j = 0; j < 4; j++) { + corners[j] = cv::Point2f(facePts.x[j] - x, 
facePts.y[j] - y); + } + + vector corners_trans(4); + corners_trans[0] = cv::Point2f(x0 + padding_x, y0 + padding_y); + corners_trans[1] = cv::Point2f(x1 + padding_x, y1 + padding_y); + corners_trans[2] = cv::Point2f(x2 + padding_x, y2 + padding_y); + corners_trans[3] = cv::Point2f(x3 + padding_x, y3 + padding_y); + + getStartTime(); + Mat transform = getPerspectiveTransform(corners, corners_trans); + //cout << transform << endl; + Mat resultImage; + warpPerspective(dst_face, resultImage, transform, Size(120 + 2 * padding_x, 48 + 2 * padding_y), INTER_LINEAR); + + getEndTime(); + std::cout << " 2 :" << dfTim << std::endl; + + namedWindow("warpPerspective", 0); + imshow("warpPerspective", resultImage); + + cv::waitKey(1); + + + + sprintf_s(dst_name, "%s%d%s%d%s%d%s", "face\\", iNum, "_", frame_count++, "_", i, "_resultImage_.jpg"); + cv::imwrite(dst_name, resultImage); + +#endif + + + + + +#ifdef showSteps + for (int j = 0; j < 4; j++) { + std::cout << facePts.x[j] - x << " " << facePts.y[j] - y << std::endl; + cv::circle(image, cv::Point(facePts.x[j], facePts.y[j]), 1, cv::Scalar(255, 255, 0), 2); + } + + cv::rectangle(image, cv::Rect(x, y, w, h), cv::Scalar(255, 0, 0), 2); + std::cout << x << " " << y << " " << w << " " << h << std::endl; +#endif + } + + cv::imshow("Extracted frame_", image); + + int c = cv::waitKey(0); + //ESCߵָĽ֡˳ȡƵ + if ((char)c == 27 /*|| currentFrame > frameToStop*/) + { + stop = true; + } + //°ͣڵǰ֡ȴһΰ + //if (c >= 0) + //{ + // waitKey(0); + //} + // currentFrame++; + + } + + //رƵļ + capture.release(); + } + return 0; +} + + +int main(int argc, char **argv) { + + + + double threshold[3] = { 0.7, 0.8, 0.8 }; + double factor = 0.709; + int minSize = 40; + std::string proto_model_dir = "..\\..\\..\\vs2013_caffe_BN_multi_label_kenel_w\\model_platecar\\"; + MTCNN *detector = new MTCNN(proto_model_dir); + + + + +#ifdef CPU_ONLY + bool usegpu = false; +#else + bool usegpu = true; +#endif + + //load model + string modelfolder = 
".\\plateCard_test\\"; + ICNNPredict* pCNN = CreatePredictInstance(modelfolder.c_str(), usegpu); + int wstd = 0, hstd = 0; + pCNN->GetInputImageSize(wstd, hstd); + + //get alphabet + vector alphabets = pCNN->GetLabels(); + + int idxBlank = 0; + vector::const_iterator it = find(alphabets.begin(), alphabets.end(), "blank"); + if (it != alphabets.end()) + idxBlank = (int)(it - alphabets.begin()); + + + map mapLabel2IDs; + for (size_t i = 0; i < alphabets.size(); i++) + { + wchar_t c = 0; + if (alphabets[i] == "blank") + continue; + wstring wlabel = string2wstring(alphabets[i], true); + mapLabel2IDs.insert(make_pair(wlabel[0], i)); + } + + + int sumspend = 0; + int nok_lexicon = 0; + int nok_nolexicon = 0; + + + + + initTrainImage(); + int imgNum = imgNames.size(); + int *idx = new int[imgNum]; + + for (int i = 0; i < imgNum; i++) { + idx[i] = i; + } + + RandomizeIdx(idx, imgNum); + for (int i = 0; i < imgNum; i++) { + int iNum = idx[i]; + cout << endl << iNum << " " << imgNames[iNum].c_str() << endl; + cv::VideoCapture capture(imgNames[iNum].c_str()); + //cv::VideoCapture capture(0); + //capture.set(CV_CAP_PROP_FRAME_WIDTH, 1280); + //capture.set(CV_CAP_PROP_FRAME_HEIGHT, 720); + // + //VideoCapture capture("F:\\MTCNN-master\\vs2013_caffe_BN_multi_label\\water_meter_caffe_old\\\img\\1.avi"); + //Ƿ:ɹʱisOpenedture + if (!capture.isOpened()) + cout << "fail to open!" 
<< endl; + //ȡ֡ + // long totalFrameNumber = capture.get(CV_CAP_PROP_FRAME_COUNT); + // cout << "Ƶ" << totalFrameNumber << "֡" << endl; + // + // + //ÿʼ֡() + // long frameToStart = 300; + // capture.set(CV_CAP_PROP_POS_FRAMES, frameToStart); + // cout << "ӵ" << frameToStart << "֡ʼ" << endl; + // + // + //ý֡ + // int frameToStop = 400000; + // + // if (frameToStop < frameToStart) + // { + // cout << "֡Сڿʼ֡󣬼˳" << endl; + // return -1; + // } + // else + // { + // cout << "֡Ϊ" << frameToStop << "֡" << endl; + // } + // + // + // ȡ֡ + // double rate = capture.get(CV_CAP_PROP_FPS); + // cout << "֡Ϊ:" << rate << endl; + + + + //һƶȡƵѭı + bool stop = false; + + //ʾÿһ֡Ĵ + + //֡ļʱ: + //int delay = 1000/rate; + // int delay = 1000 / rate; + // if (rate == 0) delay = 1; + // + //whileѭȡ֡ + //currentFrameѭпƶȡָ֡ѭı + // long currentFrame = frameToStart; + // + // VideoWriter writer; + // writer.open("../result/SuicideSquad.mp4",CV_FOURCC('M', 'J', 'P', 'G'), 25, Size(1280,720), true); + //ÿһ֡ͼ + + cv::Mat image; + int frame_count = 0; + while (!stop) + { + //ȡһ֡ + if (!capture.read(image)) + { + cout << "ȡƵʧ" << endl; + stop = true; + continue; + } + + //imshow("Live", image); + //waitKey(0); + //for (int i = 0; i < 100; i++)capture.read(image); + //// + //cv::flip(image, image,-1); + + std::vector faceInfo; + clock_t t1 = clock(); + // std::cout << "Detect " << image.rows << "X" << image.cols; + // + //image = image.t(); + + + cv::Mat mergeImg;//ϲͼ + //洢ͨͼƬ + vector splitBGR(image.channels()); + //ָͨ洢splitBGR + split(image, splitBGR); + //Ըֱֱͨͼ⻯ + for (int i = 0; iDetect(mergeImg, faceInfo, minSize, threshold, factor); +#ifdef CPU_ONLY + std::cout << " Time Using CPU: " << (clock() - t1)*1.0 / 1000 << std::endl; +#else + std::cout << " Time Using : " << (clock() - t1)*1.0 / 1000 << std::endl; +#endif + cv::Mat dst_face; + + for (int i = 0; i < faceInfo.size(); i++) { + + float x = faceInfo[i].bbox.x1; + float y = faceInfo[i].bbox.y1; + float w = faceInfo[i].bbox.x2 - 
faceInfo[i].bbox.x1 + 1; + float h = faceInfo[i].bbox.y2 - faceInfo[i].bbox.y1 + 1; + + std::cout << "[" << i << "]÷֣ " << faceInfo[i].bbox.score << std::endl; + + if (x < 0) x = 0; if (y < 0) y = 0; + if ((y + h) > image.rows) h = image.rows - y; + if ((x + w) > image.cols) w = image.cols - x; + if (w < 0) continue; + if (h < 0) continue; + + //std::cout << x << " " << y << " " << w << " " << h << std::endl; + //std::cout << image.rows << " " << image.cols << std::endl; + + dst_face = image(cv::Rect(x, y, w, h)); + char dst_name[100]; + _mkdir("C:\\plate_card_BLSTM\\testData\\face\\"); + sprintf_s(dst_name, "%s%d%s%d%s%d%s", "C:\\plate_card_BLSTM\\testData\\face\\", iNum, "_", frame_count++, "_", i, ".jpg"); + cv::imwrite(dst_name, dst_face); + + + FacePts facePts = faceInfo[i].facePts; + + + + + + cv::Point2f srcTri[4]; + cv::Point2f dstTri[4]; + cv::Mat rot_mat(2, 4, CV_32FC1); + cv::Mat warp_mat(2, 4, CV_32FC1); + + for (int j = 0; j < 4; j++) { + srcTri[j] = cv::Point2f(facePts.x[j] - x, facePts.y[j] - y); + } + + int padding_x = cvFloor(h * 0.04 * 5); + int padding_y = cvFloor(h * 0.04 * 2); + int x0 = 0; int y0 = 0; + int x1 = 120; int y1 = 0; + int x2 = 120; int y2 = 48; + int x3 = 0; int y3 = 48; + + dstTri[0] = cv::Point2f(x0 + padding_x, y0 + padding_y); + dstTri[1] = cv::Point2f(x1 + padding_x, y1 + padding_y); + dstTri[2] = cv::Point2f(x2 + padding_x, y2 + padding_y); + dstTri[3] = cv::Point2f(x3 + padding_x, y3 + padding_y); + + warp_mat = cv::getAffineTransform(srcTri, dstTri); + cv::Mat warp_dstImage = cv::Mat::zeros(48 + 2 * padding_y, 120 + 2 * padding_x, dst_face.type()); + cv::warpAffine(dst_face, warp_dstImage, warp_mat, warp_dstImage.size()); + +#ifdef showSteps + cv::namedWindow("dst_face", 0); + cv::imshow("dst_face", dst_face); + + cv::namedWindow("warp_dstImage", 0); + cv::imshow("warp_dstImage", warp_dstImage); + + cv::waitKey(1); +#endif + + sprintf_s(dst_name, "%s%d%s%d%s%d%s", "face\\", iNum, "_", frame_count++, "_", i, 
"_warp_dstImage_.jpg"); + cv::imwrite(dst_name, warp_dstImage); + +#ifdef warpPerspective + + int img_height = dst_face.rows; + int img_width = dst_face.cols; + + vector corners(4); + + for (int j = 0; j < 4; j++) { + corners[j] = cv::Point2f(facePts.x[j] - x, facePts.y[j] - y); + } + + vector corners_trans(4); + corners_trans[0] = cv::Point2f(x0 + padding_x, y0 + padding_y); + corners_trans[1] = cv::Point2f(x1 + padding_x, y1 + padding_y); + corners_trans[2] = cv::Point2f(x2 + padding_x, y2 + padding_y); + corners_trans[3] = cv::Point2f(x3 + padding_x, y3 + padding_y); + + getStartTime(); + Mat transform = getPerspectiveTransform(corners, corners_trans); + //cout << transform << endl; + Mat resultImage; + warpPerspective(dst_face, resultImage, transform, Size(120 + 2 * padding_x, 48 + 2 * padding_y), INTER_LINEAR); + + getEndTime(); + std::cout << " 2 :" << dfTim << std::endl; + + namedWindow("warpPerspective", 0); + imshow("warpPerspective", resultImage); + + cv::waitKey(1); + + + + sprintf_s(dst_name, "%s%d%s%d%s%d%s", "face\\", iNum, "_", frame_count++, "_", i, "_resultImage_.jpg"); + cv::imwrite(dst_name, resultImage); + +#endif + cv::Mat img = warp_dstImage; + cv::normalize(img, img, 255, 0, cv::NORM_MINMAX); + int w_rec = img.cols, h_rec = img.rows; + if (2 * w_rec <= h_rec) + { + cv::transpose(img, img); + cv::flip(img, img, 1); + w_rec = img.cols, h_rec = img.rows; + } + + + //int w1 = hstd*w / h; + int w1 = 280; + if (w1 != w_rec && h_rec != hstd) + cv::resize(img, img, cv::Size(w1, hstd), cv::INTER_CUBIC); + + int start = clock(); + + vector shape; + vector pred = pCNN->GetOutputFeatureMap(img, shape); + + int end = clock(); + sumspend += (end - start); + + string strpredict0 = GetPredictString(pred, idxBlank, alphabets); + + printf("[%d]%s\n", i, strpredict0.c_str()); +#ifdef showSteps + cv::namedWindow("img", 0); + cv::imshow("img", img); + cv::waitKey(1); +#endif + + +#ifdef showSteps + for (int j = 0; j < 4; j++) { + // std::cout << facePts.x[j] - x 
<< " " << facePts.y[j] - y << std::endl; + cv::circle(image, cv::Point(facePts.x[j], facePts.y[j]), 1, cv::Scalar(255, 255, 0), 2); + } + + cv::rectangle(image, cv::Rect(x, y, w, h), cv::Scalar(255, 0, 0), 2); + //std::cout << x << " " << y << " " << w << " " << h << std::endl; +#endif + } + + if (faceInfo.size() == 0) { + cv::Mat img = image; + //cv::normalize(img, img, 255, 0, cv::NORM_MINMAX); + int w_rec = img.cols, h_rec = img.rows; + if (2 * w_rec <= h_rec) + { + cv::transpose(img, img); + cv::flip(img, img, 1); + w_rec = img.cols, h_rec = img.rows; + } + + + //int w1 = hstd*w / h; + int w1 = 280; + if (w1 != w_rec && h_rec != hstd) + cv::resize(img, img, cv::Size(w1, hstd),cv::INTER_CUBIC); + + int start = clock(); + + vector shape; + vector pred = pCNN->GetOutputFeatureMap(img, shape); + + int end = clock(); + sumspend += (end - start); + + string strpredict0 = GetPredictString(pred, idxBlank, alphabets); + + printf("[%d]%s\n", iNum, strpredict0.c_str()); + + } + + + cv::namedWindow("Extracted frame_", 0); + cv::imshow("Extracted frame_", image); + + int c = cv::waitKey(0); + //ESCߵָĽ֡˳ȡƵ + if ((char)c == 27 /*|| currentFrame > frameToStop*/) + { + stop = true; + } + //°ͣڵǰ֡ȴһΰ + //if (c >= 0) + //{ + // waitKey(0); + //} + // currentFrame++; + + } + + //رƵļ + capture.release(); + } + return 0; + + +} + + +int main_mtcnn_haar(int argc, char **argv) { + + + + double threshold[3] = { 0.7, 0.8, 0.8 }; + double factor = 0.709; + int minSize = 40; + std::string proto_model_dir = "C:\\plate_card_BLSTM\\vs2013_caffe_BN_multi_label_kenel_w\\model_platecar\\"; + MTCNN *detector = new MTCNN(proto_model_dir); + +#ifndef HAAR_CASCADE + cv::CascadeClassifier char_cascade; + if (!char_cascade.load(".\\rec_test\\cascade12.xml")) + { + cerr << "ERROR: Could not load classifier cascade" << endl; + return -1; + } + + + string chinese_model_file = ".\\rec_test\\chinese\\model.prototxt"; + string chinese_trained_file = ".\\rec_test\\chinese\\model.caffemodel"; + string 
chinese_mean_file = ""; + int chinese_label_file = 31; + Classifier net_chinese(chinese_model_file, chinese_trained_file, chinese_mean_file, chinese_label_file); + + + string char_model_file = ".\\rec_test\\char\\model.prototxt"; + string char_trained_file = ".\\rec_test\\char\\model.caffemodel"; + string char_mean_file = ""; + int char_label_file = 37; + Classifier net_char(char_model_file, char_trained_file, char_mean_file, char_label_file); + +#endif // HAAR_CASCADE + + + + +#ifdef CPU_ONLY + bool usegpu = false; +#else + bool usegpu = true; +#endif + + //load model + string modelfolder = ".\\plateCard_test\\"; + ICNNPredict* pCNN = CreatePredictInstance(modelfolder.c_str(), usegpu); + int wstd = 0, hstd = 0; + pCNN->GetInputImageSize(wstd, hstd); + + //get alphabet + vector alphabets = pCNN->GetLabels(); + + int idxBlank = 0; + vector::const_iterator it = find(alphabets.begin(), alphabets.end(), "blank"); + if (it != alphabets.end()) + idxBlank = (int)(it - alphabets.begin()); + + + map mapLabel2IDs; + for (size_t i = 0; i < alphabets.size(); i++) + { + wchar_t c = 0; + if (alphabets[i] == "blank") + continue; + wstring wlabel = string2wstring(alphabets[i], true); + mapLabel2IDs.insert(make_pair(wlabel[0], i)); + } + + + int sumspend = 0; + int nok_lexicon = 0; + int nok_nolexicon = 0; + + + + + initTrainImage(); + int imgNum = imgNames.size(); + for (int iNum = 0; iNum < imgNum; iNum++) { + + cout << endl << iNum << " " << imgNames[iNum].c_str() << endl; + cv::VideoCapture capture(imgNames[iNum].c_str()); + //cv::VideoCapture capture(0); + //capture.set(CV_CAP_PROP_FRAME_WIDTH, 1280); + //capture.set(CV_CAP_PROP_FRAME_HEIGHT, 720); + // + //VideoCapture capture("F:\\MTCNN-master\\vs2013_caffe_BN_multi_label\\water_meter_caffe_old\\\img\\1.avi"); + //Ƿ:ɹʱisOpenedture + if (!capture.isOpened()) + cout << "fail to open!" 
<< endl; + //ȡ֡ + // long totalFrameNumber = capture.get(CV_CAP_PROP_FRAME_COUNT); + // cout << "Ƶ" << totalFrameNumber << "֡" << endl; + // + // + //ÿʼ֡() + // long frameToStart = 300; + // capture.set(CV_CAP_PROP_POS_FRAMES, frameToStart); + // cout << "ӵ" << frameToStart << "֡ʼ" << endl; + // + // + //ý֡ + // int frameToStop = 400000; + // + // if (frameToStop < frameToStart) + // { + // cout << "֡Сڿʼ֡󣬼˳" << endl; + // return -1; + // } + // else + // { + // cout << "֡Ϊ" << frameToStop << "֡" << endl; + // } + // + // + // ȡ֡ + // double rate = capture.get(CV_CAP_PROP_FPS); + // cout << "֡Ϊ:" << rate << endl; + + + + //һƶȡƵѭı + bool stop = false; + + //ʾÿһ֡Ĵ + + //֡ļʱ: + //int delay = 1000/rate; + // int delay = 1000 / rate; + // if (rate == 0) delay = 1; + // + //whileѭȡ֡ + //currentFrameѭпƶȡָ֡ѭı + // long currentFrame = frameToStart; + // + // VideoWriter writer; + // writer.open("../result/SuicideSquad.mp4",CV_FOURCC('M', 'J', 'P', 'G'), 25, Size(1280,720), true); + //ÿһ֡ͼ + + cv::Mat image; + int frame_count = 0; + while (!stop) + { + //ȡһ֡ + if (!capture.read(image)) + { + cout << "ȡƵʧ" << endl; + stop = true; + continue; + } + + //imshow("Live", image); + //waitKey(0); + //for (int i = 0; i < 100; i++)capture.read(image); + //// + //cv::flip(image, image,-1); + + std::vector faceInfo; + clock_t t1 = clock(); + // std::cout << "Detect " << image.rows << "X" << image.cols; + // + //image = image.t(); + + + cv::Mat mergeImg;//ϲͼ + //洢ͨͼƬ + vector splitBGR(image.channels()); + //ָͨ洢splitBGR + split(image, splitBGR); + //Ըֱֱͨͼ⻯ + for (int i = 0; iDetect(mergeImg, faceInfo, minSize, threshold, factor); +#ifdef CPU_ONLY + std::cout << " Time Using CPU: " << (clock() - t1)*1.0 / 1000 << std::endl; +#else + std::cout << " Time Using : " << (clock() - t1)*1.0 / 1000 << std::endl; +#endif + cv::Mat dst_face; + +#ifndef HAAR_CASCADE + vector results; + vector(results).swap(results); + results.clear(); +#endif // DEBUG + + cv::Mat warp_dstImage; + for (int i = 0; i < 
faceInfo.size() ||(i==0 && faceInfo.size() ==0); i++) { + + if (faceInfo.size() != 0) { + + float x = faceInfo[i].bbox.x1; + float y = faceInfo[i].bbox.y1; + float w = faceInfo[i].bbox.x2 - faceInfo[i].bbox.x1 + 1; + float h = faceInfo[i].bbox.y2 - faceInfo[i].bbox.y1 + 1; + + std::cout << "[" << i << "]÷֣ " << faceInfo[i].bbox.score << std::endl; + + if (x < 0) x = 0; if (y < 0) y = 0; + if ((y + h) > image.rows) h = image.rows - y; + if ((x + w) > image.cols) w = image.cols - x; + if (w < 0) continue; + if (h < 0) continue; + + //std::cout << x << " " << y << " " << w << " " << h << std::endl; + //std::cout << image.rows << " " << image.cols << std::endl; + + dst_face = image(cv::Rect(x, y, w, h)); + char dst_name[100]; + _mkdir("C:\\plate_card_BLSTM\\testData\\face\\"); + sprintf_s(dst_name, "%s%d%s%d%s%d%s", "C:\\plate_card_BLSTM\\testData\\face\\", iNum, "_", frame_count++, "_", i, ".jpg"); + cv::imwrite(dst_name, dst_face); + + + FacePts facePts = faceInfo[i].facePts; + + + + + + cv::Point2f srcTri[4]; + cv::Point2f dstTri[4]; + cv::Mat rot_mat(2, 4, CV_32FC1); + cv::Mat warp_mat(2, 4, CV_32FC1); + + for (int j = 0; j < 4; j++) { + srcTri[j] = cv::Point2f(facePts.x[j] - x, facePts.y[j] - y); + } + + int padding_x = cvFloor(h * 0.04 * 5); + int padding_y = cvFloor(h * 0.04 * 2); + int x0 = 0; int y0 = 0; + int x1 = 120; int y1 = 0; + int x2 = 120; int y2 = 48; + int x3 = 0; int y3 = 48; + + dstTri[0] = cv::Point2f(x0 + padding_x, y0 + padding_y); + dstTri[1] = cv::Point2f(x1 + padding_x, y1 + padding_y); + dstTri[2] = cv::Point2f(x2 + padding_x, y2 + padding_y); + dstTri[3] = cv::Point2f(x3 + padding_x, y3 + padding_y); + + warp_mat = cv::getAffineTransform(srcTri, dstTri); + /*cv::Mat */warp_dstImage = cv::Mat::zeros(48 + 2 * padding_y, 120 + 2 * padding_x, dst_face.type()); + cv::warpAffine(dst_face, warp_dstImage, warp_mat, warp_dstImage.size()); + +#ifdef showSteps + cv::namedWindow("dst_face", 0); + cv::imshow("dst_face", dst_face); + + 
cv::namedWindow("warp_dstImage", 0); + cv::imshow("warp_dstImage", warp_dstImage); + + cv::waitKey(1); +#endif + + sprintf_s(dst_name, "%s%d%s%d%s%d%s", "face\\", iNum, "_", frame_count++, "_", i, "_warp_dstImage_.jpg"); + cv::imwrite(dst_name, warp_dstImage); + +#ifdef warpPerspective + + int img_height = dst_face.rows; + int img_width = dst_face.cols; + + vector corners(4); + + for (int j = 0; j < 4; j++) { + corners[j] = cv::Point2f(facePts.x[j] - x, facePts.y[j] - y); + } + + vector corners_trans(4); + corners_trans[0] = cv::Point2f(x0 + padding_x, y0 + padding_y); + corners_trans[1] = cv::Point2f(x1 + padding_x, y1 + padding_y); + corners_trans[2] = cv::Point2f(x2 + padding_x, y2 + padding_y); + corners_trans[3] = cv::Point2f(x3 + padding_x, y3 + padding_y); + + getStartTime(); + Mat transform = getPerspectiveTransform(corners, corners_trans); + //cout << transform << endl; + Mat resultImage; + warpPerspective(dst_face, resultImage, transform, Size(120 + 2 * padding_x, 48 + 2 * padding_y), INTER_LINEAR); + + getEndTime(); + std::cout << " 2 :" << dfTim << std::endl; + + namedWindow("warpPerspective", 0); + imshow("warpPerspective", resultImage); + + cv::waitKey(1); + + + + sprintf_s(dst_name, "%s%d%s%d%s%d%s", "face\\", iNum, "_", frame_count++, "_", i, "_resultImage_.jpg"); + cv::imwrite(dst_name, resultImage); + +#endif + +#ifdef showSteps + for (int j = 0; j < 4; j++) { + // std::cout << facePts.x[j] - x << " " << facePts.y[j] - y << std::endl; + cv::circle(image, cv::Point(facePts.x[j], facePts.y[j]), 1, cv::Scalar(255, 255, 0), 2); + } + + cv::rectangle(image, cv::Rect(x, y, w, h), cv::Scalar(255, 0, 0), 2); + //std::cout << x << " " << y << " " << w << " " << h << std::endl; +#endif + + } + else if (faceInfo.size() == 0) + { + warp_dstImage = image; + } +#ifndef HAAR_CASCADE + result_plate result; + cv::Rect face; + if (faceInfo.size() != 0) { + face = cv::Rect(faceInfo[i].bbox.x1, faceInfo[i].bbox.y1, faceInfo[i].bbox.x2 - faceInfo[i].bbox.x1, 
faceInfo[i].bbox.y2 - faceInfo[i].bbox.y1); + } + else if (faceInfo.size() == 0) + { + face = cv::Rect(0,0,image.cols,image.rows); + warp_dstImage = image; + } + result.coordinate = face; + + + cv::Mat detect_obj = warp_dstImage; + //cv::resize(detect_obj, detect_obj ,cv::Size(detect_obj.cols * 2, detect_obj.rows *2),cv::INTER_CUBIC); + + vector detectROI; + cv::Mat show_detect_obj; detect_obj.copyTo(show_detect_obj); + cv::Mat shaixuan_obj, choose_detect_obj, normalization_detect_obj; + shaixuan_obj = show_detect_obj.clone();//10-12 + choose_detect_obj = shaixuan_obj.clone(); + normalization_detect_obj = shaixuan_obj.clone(); + //###################################################################### + std::vector chars_roi; + char_cascade.detectMultiScale(detect_obj, chars_roi, 1.05, 1, 0 | CV_HAAR_SCALE_IMAGE/*, cv::Size(0, 0), cv::Size(1700, 1700)*/); + if (showSteps) + printf("chars_roi size = %d \n", chars_roi.size()); + + for (unsigned int j = 0; j < chars_roi.size(); j++) + { + const cv::Rect& single_char_roi = chars_roi[j]; + detectROI.push_back(single_char_roi); + } + + std::sort(detectROI.begin(), detectROI.end(), sort_by_x); + + if (showSteps) { + for (int i = 0; i < detectROI.size(); i++) { + cout << detectROI[i].x << " " << detectROI[i].y << " " << detectROI[i].width << " " << detectROI[i].height << endl; + + cv::Point tl(detectROI[i].x, detectROI[i].y); + cv::Point br = tl + cv::Point(detectROI[i].width, detectROI[i].height); + cv::Scalar magenta = cv::Scalar((i) * 10, 255 - (i + 1) * 10, (i + 2) * 50);//ɫѡȡ + + cv::rectangle(show_detect_obj, tl, br, magenta, 1, 1, 0); + } + + cv::namedWindow("show_detect_obj", 0); + cv::imshow("show_detect_obj", show_detect_obj); + + } + + ///**************** step 2.1 fix detected roi ******************************// + int avg_distance_of_chars = 0; + int ROIWIDTH = 0; //ƽ + int ROIHEIGHT = 0;//ƽ + + notfound = 1; + if (showSteps) + printf(" \n start \n"); + + vector ROI_choose_paixu = roichoose(detectROI, 
choose_detect_obj);//ѡʵĿ + + if (showSteps) + show_choose_step(shaixuan_obj, ROI_choose_paixu, "ROI_choose_paixu"); + if (showSteps) + printf("ʹѰҵĿΪ%d \n", ROI_choose_paixu.size()); + + if (notfound == 0 || detectROI.size() < 4) { + printf(" ⵽ijַΪ %d 4˳ ... \n", detectROI.size()); + continue; + } + + vector ROI_normalization = roinormalization(ROI_choose_paixu, normalization_detect_obj);//һ + + if (showSteps) + show_choose_step(shaixuan_obj, ROI_normalization, "ROI_normalization"); + + vector ROI_choose = roicomplete(ROI_normalization, normalization_detect_obj);// + + if (showSteps) + show_choose_step(shaixuan_obj, ROI_choose, "ROI_choose"); + + std::sort(ROI_choose.begin(), ROI_choose.end(), sort_by_x); + + //show_choose_step(cv::Mat shaixuan_obj, vector ROI_choose, char* windName) + + ///************************** ʶɸѡ **************************/ + IplImage *detect = &IplImage(normalization_detect_obj); + if (ROI_choose.size() > 4) + { + ///?????????????????????? + + + if (ROI_choose.size() > 5) + { + + /*******************************************һַó*******************************/ + if (showSteps) + printf(": "); + if (ROI_choose.size() == 8)//ȥǰǺ(һʹóмĵλýʶڿرʱ򲻺жϣ + { + + if (ROI_choose[2].x - ROI_choose[1].x - ROI_choose[1].width > 0 && ROI_choose[3].x - ROI_choose[2].x - ROI_choose[2].width > 0) + { + if (showSteps) + printf("special"); + } + /* else if (ROI_choose[2].x - ROI_choose[1].x - ROI_choose[1].width > 0) // + ROI_choose.erase(ROI_choose.begin() + 7); + else if (ROI_choose[3].x - ROI_choose[2].x - ROI_choose[2].width > 0) + ROI_choose.erase(ROI_choose.begin());*/ + } + + /// ********************* ȥǰǺ(ʹʶȥloss׼ȷ ********************/// + if (ROI_choose.size() == 8) + { + int predict0, predict1, predict11, predict7, predict6; + predict0 = -1; + predict1 = -1; predict11 = -1; + predict7 = -1; + double loss0, loss1, loss11, loss7, loss6; + loss0 = 0.0; + loss11 = 0.0; + loss1 = 0.0; + loss7 = 0.0; + CvRect cut_single0 = ROI_choose[0]; + CvRect cut_single1 = 
ROI_choose[1]; + CvRect cut_single7 = ROI_choose[7]; + IplImage* img_single0_result = cvCreateImage(cvSize(cut_single0.width, cut_single0.height), detect->depth, detect->nChannels);//õֵַͼ + cvSetImageROI(detect, cut_single0); + cvCopy(detect, img_single0_result); + cvResetImageROI(detect); + + IplImage* img_single1_result = cvCreateImage(cvSize(cut_single1.width, cut_single1.height), detect->depth, detect->nChannels);//õֵַͼ + cvSetImageROI(detect, cut_single1); + cvCopy(detect, img_single1_result); + cvResetImageROI(detect); + + IplImage* img_single7_result = cvCreateImage(cvSize(cut_single7.width, cut_single7.height), detect->depth, detect->nChannels);//õֵַͼ + cvSetImageROI(detect, cut_single7); + cvCopy(detect, img_single7_result); + cvResetImageROI(detect); + + IplImage* img_gray0 = cvCreateImage(cvGetSize(img_single0_result), IPL_DEPTH_8U, 1);//Ŀͼ + cvCvtColor(img_single0_result, img_gray0, CV_BGR2GRAY); + cv::Mat img_re0 = cv::cvarrToMat(img_gray0); + /*IplImage* img_re0 = cvCreateImage(cvSize(charRec.getWidth(), charRec.getHeight()), 8, charRec.getChannel()); + cvResize(img_gray0, img_re0);*/ + + IplImage* img_gray1 = cvCreateImage(cvGetSize(img_single1_result), IPL_DEPTH_8U, 1);//Ŀͼ + cvCvtColor(img_single1_result, img_gray1, CV_BGR2GRAY); + cv::Mat img_re1 = cv::cvarrToMat(img_gray1); + /*IplImage* img_re1 = cvCreateImage(cvSize(charRec.getWidth(), charRec.getHeight()), 8, charRec.getChannel()); + cvResize(img_gray1, img_re1);*/ + + IplImage* img_gray7 = cvCreateImage(cvGetSize(img_single7_result), IPL_DEPTH_8U, 1);//Ŀͼ + cvCvtColor(img_single7_result, img_gray7, CV_BGR2GRAY); + cv::Mat img_re7 = cv::cvarrToMat(img_gray7); + /*IplImage* img_re7 = cvCreateImage(cvSize(charRec.getWidth(), charRec.getHeight()), 8, charRec.getChannel()); + cvResize(img_gray7, img_re7);*/ + rec_char(net_chinese, img_re0, predict0, loss0); + rec_char(net_chinese, img_re1, predict1, loss1); + rec_char(net_char, img_re1, predict11, loss11); + rec_char(net_char, img_re7, predict7, 
loss7); + + //charRec0.recognise_form_memory_scale0_1(img_re0, &predict0, &loss0);//ʶһλǷǺ + //charRec0.recognise_form_memory_scale0_1(img_re1, &predict1, &loss1);//ڶλǷǺ + //charRec.recognise_form_memory_scale0_1(img_re1, &predict11, &loss11);//ڶλǷĸ + //charRec.recognise_form_memory_scale0_1(img_re7, &predict7, &loss7);//λǷĸ + + if (showSteps) + printf("loss0_cn=%f loss7_char=%f loss1_Chinese=%f loss1_char=%f \n", loss0, loss7, loss1, loss11); + if (loss11 > loss1) { + ROI_choose.erase(ROI_choose.begin() + 7); + } + else { + ROI_choose.erase(ROI_choose.begin()); + } + + + } + /// ***************************** //ʱҲԴ ***********************// + if (ROI_choose.size() == 9) + { + int predict1_cn, predict1_char, predict2_cn, predict2_char, predict3_cn, predict3_char = -1; + + double loss1_cn, loss1_char, loss2_cn, loss2_char, loss3_cn, loss3_char = 0.0; + + CvRect cut_single1 = ROI_choose[0]; + CvRect cut_single2 = ROI_choose[1]; + CvRect cut_single3 = ROI_choose[2]; + IplImage* img_single1_result = cvCreateImage(cvSize(cut_single1.width, cut_single1.height), detect->depth, detect->nChannels);//õֵַͼ + cvSetImageROI(detect, cut_single1); + cvCopy(detect, img_single1_result); + cvResetImageROI(detect); + + IplImage* img_single2_result = cvCreateImage(cvSize(cut_single2.width, cut_single2.height), detect->depth, detect->nChannels);//õֵַͼ + cvSetImageROI(detect, cut_single2); + cvCopy(detect, img_single2_result); + cvResetImageROI(detect); + + IplImage* img_single3_result = cvCreateImage(cvSize(cut_single3.width, cut_single3.height), detect->depth, detect->nChannels);//õֵַͼ + cvSetImageROI(detect, cut_single3); + cvCopy(detect, img_single3_result); + cvResetImageROI(detect); + + IplImage* img_gray1 = cvCreateImage(cvGetSize(img_single1_result), IPL_DEPTH_8U, 1);//Ŀͼ + cvCvtColor(img_single1_result, img_gray1, CV_BGR2GRAY); + cv::Mat img_re1 = cv::cvarrToMat(img_gray1); + /*IplImage* img_re1 = cvCreateImage(cvSize(charRec.getWidth(), charRec.getHeight()), 8, 
charRec.getChannel()); + cvResize(img_gray1, img_re1);*/ + + IplImage* img_gray2 = cvCreateImage(cvGetSize(img_single2_result), IPL_DEPTH_8U, 1);//Ŀͼ + cvCvtColor(img_single2_result, img_gray2, CV_BGR2GRAY); + cv::Mat img_re2 = cv::cvarrToMat(img_gray2); + /*IplImage* img_re2 = cvCreateImage(cvSize(charRec.getWidth(), charRec.getHeight()), 8, charRec.getChannel()); + cvResize(img_gray2, img_re2);*/ + + IplImage* img_gray3 = cvCreateImage(cvGetSize(img_single3_result), IPL_DEPTH_8U, 1);//Ŀͼ + cvCvtColor(img_single3_result, img_gray3, CV_BGR2GRAY); + cv::Mat img_re3 = cv::cvarrToMat(img_gray3); + /*IplImage* img_re3 = cvCreateImage(cvSize(charRec.getWidth(), charRec.getHeight()), 8, charRec.getChannel()); + cvResize(img_gray3, img_re3);*/ + + + rec_char(net_chinese, img_re1, predict1_cn, loss1_cn); + rec_char(net_chinese, img_re2, predict2_cn, loss2_cn); + rec_char(net_chinese, img_re3, predict3_cn, loss3_cn); + rec_char(net_char, img_re1, predict1_char, loss1_char); + rec_char(net_char, img_re2, predict2_char, loss2_char); + rec_char(net_char, img_re3, predict3_char, loss3_char); + + //charRec0.recognise_form_memory_scale0_1(img_re1, &predict1_cn, &loss1_cn);//ʶһλǷǺ + //charRec0.recognise_form_memory_scale0_1(img_re2, &predict2_cn, &loss2_cn);//ڶλǷǺ + //charRec0.recognise_form_memory_scale0_1(img_re3, &predict3_cn, &loss3_cn);//λǷǺ + //charRec.recognise_form_memory_scale0_1(img_re1, &predict1_char, &loss1_char);//һλǷĸ + //charRec.recognise_form_memory_scale0_1(img_re2, &predict2_char, &loss2_char);//ڶλǷĸ + //charRec.recognise_form_memory_scale0_1(img_re3, &predict3_char, &loss3_char);//λǷĸ + + if (loss3_cn > loss3_char)//λΪʱǰλɾ + { + ROI_choose.erase(ROI_choose.begin()); + ROI_choose.erase(ROI_choose.begin()); + } + else if (loss2_cn > loss2_char) //ڶλΪʱǰɾһλ + { + ROI_choose.erase(ROI_choose.begin() + 8); + ROI_choose.erase(ROI_choose.begin()); + } + else //λڶλΪʱɾλ + { + ROI_choose.erase(ROI_choose.begin() + 8); + ROI_choose.erase(ROI_choose.begin() + 7); + } + + 
/*if (loss11 > loss1) { + ROI_choose.erase(ROI_choose.begin() + 7); + } + else { + ROI_choose.erase(ROI_choose.begin()); + }*/ + if (showSteps) { + printf("loss1Ϊroichooseĵһ \n"); + printf("loss1_cn=%f loss1_char=%f \n loss2_cn=%f loss2_char=%f \n loss3_cn=%f loss3_char=%f \n", loss1_cn, loss1_char, loss2_cn, loss2_char, loss3_cn, loss3_char); + } + } + if (showSteps) { + show_choose_step(shaixuan_obj, ROI_choose, "ROI_choose_final"); + //cvWaitKey(0); + } + + + ///***************************** ʶ *********************** 4-25жǷʹȫij*******************************/ + if (ROI_choose.size() >= 6) + { + + int use_fcn = 0; + int roiwidth = ROI_choose[0].width; + for (int i = 3; i < ROI_choose.size(); i++) + { + if (ROI_choose[i].x - roiwidth - ROI_choose[i - 1].x > 0.1*roiwidth) + use_fcn++; + } + //if (use_fcn >= 0) + //{ + // //printf("need use fcn \n"); + // + // + // std::vector predictions = classifier.Classify(detect_obj, 1); + // for (int i = 0; i < predictions.size(); i++) { + // std::cout << predictions[i].label << " "; + // cv::Rect roi = cv::Rect(predictions[i].centor.x - predictions[i].avg_width / 2, + // predictions[i].centor.y - predictions[i].avg_height / 2, + // predictions[i].avg_width, + // predictions[i].avg_height + // );//200*80ͼƬµĿ + // cv::Rect roi_final; + // roi_final.x = (roi.x*detect_obj.cols) / 200; + // roi_final.y = (roi.y*detect_obj.rows) / 80; + // roi_final.width = (roi.width*detect_obj.cols) / 200; + // roi_final.height = (roi.height*detect_obj.rows) / 80; + // + // cv::rectangle(detect_obj, roi_final, cv::Scalar(255, 0, 255), 1, 8, 0); + // } + // + // + // cv::namedWindow("src", 0); + // cv::imshow("src", detect_obj); + // cvWaitKey(); + // + //} + //if (use_fcn > 0) { + // + // _mkdir("use_fcn"); + // + // SYSTEMTIME stTime; + // GetLocalTime(&stTime); + // char pVideoName[256]; + // sprintf_s(pVideoName, 256, "\\%d_%d_%d_%d_%d_%d_%d", stTime.wYear, stTime.wMonth, stTime.wDay, stTime.wHour, stTime.wMinute, stTime.wSecond, 
stTime.wMilliseconds); + // char image_name1[500]; + // sprintf_s(image_name1, 500, "%s%s%s%s", "use_fcn", "\\", pVideoName, ".jpg");//ͼƬ + // imwrite(image_name1, img_src_orignal); + + // continue; + //} + /************************ʹfcn***************/ + if (1) { + for (int i = 0; i < ROI_choose.size(); i++) + { + CvRect cut_single = ROI_choose[i]; + IplImage* img_single_result = cvCreateImage(cvSize(cut_single.width, cut_single.height), detect->depth, detect->nChannels);//õֵַͼ + cvSetImageROI(detect, cut_single); + cvCopy(detect, img_single_result); + cvResetImageROI(detect); + + if (showSteps) + { + char windowsname[200]; + sprintf(windowsname, "%s%d", "detect", i);//ֱʾָͼ + cvNamedWindow(windowsname, 1); + cvShowImage(windowsname, img_single_result); + } + + IplImage* img_gray = cvCreateImage(cvGetSize(img_single_result), IPL_DEPTH_8U, 1);//Ŀͼ + cvCvtColor(img_single_result, img_gray, CV_BGR2GRAY); + cv::Mat img_re = cv::cvarrToMat(img_gray); + /*IplImage* img_re = cvCreateImage(cvSize(charRec.getWidth(), charRec.getHeight()), 8, charRec.getChannel()); + cvResize(img_gray, img_re);*/ + int predict = -1; + double loss = 0.0; + + if (i == 0) { + + rec_char(net_chinese, img_re, predict, loss); + /*charRec0.recognise_form_memory_scale0_2(img_re, &predict, &loss);*/ + + if (predict > 30) { + predict = -1; + } + + char* hanzi = outputhanzi(predict); + + Pred pred; + pred.first = cvRect(ROI_choose[i].x + face.x, ROI_choose[i].y + face.y, ROI_choose[i].width, ROI_choose[i].height); + pred.second = hanzi; + result.one_char.push_back(pred); + + if (showSteps) + printf("-loss=%f \n", loss); + + } + else { + rec_char(net_char, img_re, predict, loss); + /*if (predict == -1) + predict = 0;*/ + /*charRec.recognise_form_memory_scale0_1(img_re, &predict, &loss);*/ + + if ((i == 1) && (predict == 8)) + predict = 11; + char* szzm = outputplate(predict); + Pred pred; + pred.first = cvRect(ROI_choose[i].x + face.x, ROI_choose[i].y + face.y, ROI_choose[i].width, ROI_choose[i].height); 
+ pred.second = szzm; + result.one_char.push_back(pred); + + + if (showSteps) + printf("predict=%d loss=%f \n", predict, loss); + } + + + + /*if (loss < 0.6) + predict = -1;*/ + } + } + } + } + + + /*if (showSteps) + { + cvDestroyAllWindows(); + }*/ + + } + + results.push_back(result); + //######################################################### + + +#endif // HAAR_CASCADE + + + + + } + + + + if (results.size() == NULL) { + continue; + } + + + IplImage *showcarplate; + if (faceInfo.size() == 0) + showcarplate = &IplImage(image); + else + showcarplate = &IplImage(warp_dstImage); + for (int num = 0; num < (int)results.size(); num++) + { + CvRect face = results[num].coordinate; + cvRectangle(showcarplate, cvPoint(face.x, face.y), cvPoint(face.x + face.width, face.y + face.height), CV_RGB(255, 255, 0), 3, 4, 0); + vector carplate = results[num].one_char; + ///*************************** ʾ *****************************/// + if (carplate.size() >= 7 && showdemo) + { + // CvxText text("simhei.ttf"); + char *strID = new char(100); strID[0] = '\0'; + char *strID_CN = new char(100); strID_CN[0] = '\0'; + + strcat(strID_CN, carplate[0].second); + + for (int i = 1; i < carplate.size(); i++) + { + strcat(strID, carplate[i].second); + } + + + CvScalar color; + color.val[0] = face.height *0.3; + color.val[1] = 0.5; + color.val[2] = 0.1; + color.val[3] = 0; + // text.restoreFont(); + + // text.setFont(NULL, &color, NULL, NULL); + if (faceInfo.size() == 0) { + cvRectangle(showcarplate, cvPoint(face.x - face.height*0.1, face.y - 5), cvPoint(face.x + face.height * 1.2, face.y - 10 - face.height*0.3), CV_RGB(0, 0, 0), -1, 4, 0); + for (int i = 0; i < carplate.size(); i++) + { + cvRectangle(showcarplate, cvPoint(carplate[i].first.x, carplate[i].first.y), cvPoint(carplate[i].first.x + carplate[i].first.width, carplate[i].first.y + carplate[i].first.height), CV_RGB(0, 255, 0), 1, 4, 0); + } + // text.putText(showcarplate, strID, cvPoint(face.x + face.height *0.3, face.y - 10), 
CV_RGB(255, 255, 255)); + // text.putText(showcarplate, strID_CN, cvPoint(face.x, face.y - 10), CV_RGB(255, 255, 255)); + } + else if(faceInfo.size() != 0) { + + + /*cvRectangle(showcarplate, cvPoint(face.x - face.height*0.1, face.y - 5), cvPoint(face.x + face.height * 1.2, face.y - 10 - face.height*0.3), CV_RGB(0, 0, 0), -1, 4, 0);*/ + for (int i = 0; i < carplate.size(); i++) + { + carplate[i].first.x -= face.x; carplate[i].first.y -= face.y; + cvRectangle(showcarplate, cvPoint(carplate[i].first.x, carplate[i].first.y), cvPoint(carplate[i].first.x + carplate[i].first.width, carplate[i].first.y + carplate[i].first.height), CV_RGB(0, 255, 0), 1, 4, 0); + } + } + for (int i = 0; i < carplate.size(); i++) + { + printf("%s", carplate[i].second); + } + cvNamedWindow("showcarplate", 0); + cvShowImage("showcarplate", showcarplate); + + carplate.clear(); + printf(" \n"); + } + + + + + if (0) + { + int c = cvWaitKey(); + if ((c == 's') || (c == 'S')) { + _mkdir("error"); + + SYSTEMTIME stTime; + GetLocalTime(&stTime); + char pVideoName[256]; + sprintf_s(pVideoName, 256, "\\%d_%d_%d_%d_%d_%d_%d", stTime.wYear, stTime.wMonth, stTime.wDay, stTime.wHour, stTime.wMinute, stTime.wSecond, stTime.wMilliseconds); + char image_name1[500]; + sprintf_s(image_name1, 500, "%s%s%s%s", "error", "\\", pVideoName, ".bmp");//ͼƬ + imwrite(image_name1, image); + + + _mkdir("orig_result"); + char outName[200]; + sprintf_s(outName, 200, "%s%s%s", "orig_result", "\\", file_name[iNum].c_str()); + cout << iNum << outName << endl; + MoveFile(img_path[iNum].c_str(), outName); // D2.txtƶE + + + } + else if ((c == 'b') || (c == 'B')) { + if (iNum > 1) { + iNum--; iNum--; + } + } + } + + + + } + + + + + + + cv::namedWindow("Extracted frame_", 0); + cv::imshow("Extracted frame_", image); + + int c = cv::waitKey(0); + //ESCߵָĽ֡˳ȡƵ + if ((char)c == 27 /*|| currentFrame > frameToStop*/) + { + stop = true; + } + //°ͣڵǰ֡ȴһΰ + //if (c >= 0) + //{ + // waitKey(0); + //} + // currentFrame++; + + } + + //رƵļ + 
capture.release(); + } + return 0; + + +} \ No newline at end of file diff --git a/ocr_test/ocr_test.vcxproj b/ocr_test/ocr_test.vcxproj new file mode 100644 index 0000000000000000000000000000000000000000..7964189feef0310474354a00e8dc22afac803d71 --- /dev/null +++ b/ocr_test/ocr_test.vcxproj @@ -0,0 +1,184 @@ + + + + + Debug + Win32 + + + Release + Win32 + + + Debug + x64 + + + Release + x64 + + + + {2CC545B1-643B-4307-9D90-13E93E8FEBD3} + Win32Proj + ocr_test + 8.1 + + + + Application + true + v140 + Unicode + + + Application + false + v140 + true + Unicode + + + Application + true + v140 + MultiByte + + + Application + false + v140 + true + MultiByte + + + + + + + + + + + + + + + + + + + + + + true + + + true + F:\boost_1_57_0\stage\lib;F:\opencv\build\x64\vc14\staticlib;..\..\tools_bin;$(LibraryPath) + ..\..\..\boost_1_57_0;F:\opencv\build\include;..\libClassification;..\..\include;$(IncludePath) + ..\..\tools_bin\ + $(ProjectName)d + + + false + + + false + ..\..\opensource\opencv\lib;..\..\opensource\boost_1_57_0\lib;..\..\tools_bin;C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v9.2\lib\x64;$(LibraryPath) + ..\..\opensource\boost_1_57_0;..\..\opensource\opencv\include;..\libClassification;..\..\include;..\..\3rdparty\include;C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v9.2\include;..\..\src\;..\..\3rdparty\include\openblas;$(IncludePath) + ..\..\tools_bin\ + + + + Use + Level3 + Disabled + WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions) + true + + + Console + true + + + + + NotUsing + Level3 + Disabled + _DEBUG;_CONSOLE;_CRT_SECURE_NO_WARNINGS;%(PreprocessorDefinitions) + true + + + Console + true + opencv_core2410d.lib;opencv_highgui2410d.lib;opencv_imgproc2410d.lib;libjpegd.lib;libpngd.lib;zlibd.lib;%(AdditionalDependencies) + + + + + Level3 + Use + MaxSpeed + true + true + WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions) + true + + + Console + true + true + true + + + + + Level3 + NotUsing + Disabled + true + true + 
NDEBUG;_CONSOLE;_CRT_SECURE_NO_WARNINGS;GLOG_NO_ABBREVIATED_SEVERITIES;%(PreprocessorDefinitions) + true + + + Console + true + true + true + opencv_core340.lib;opencv_highgui340.lib;opencv_imgcodecs340.lib;opencv_imgproc340.lib;opencv_objdetect340.lib;opencv_ml340.lib;opencv_video340.lib;opencv_videoio340.lib;%(AdditionalDependencies) + + + + + + + + + + + + + + + + + + + + + + + Create + Create + Create + Create + + + + + + + \ No newline at end of file diff --git a/ocr_test/ocr_test.vcxproj.filters b/ocr_test/ocr_test.vcxproj.filters new file mode 100644 index 0000000000000000000000000000000000000000..fb7e934b53aed0bc80734db122b486e5b6dfc109 --- /dev/null +++ b/ocr_test/ocr_test.vcxproj.filters @@ -0,0 +1,64 @@ + + + + + {4FC737F1-C7A5-4376-A066-2A32D752A2FF} + cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + + {93995380-89BD-4b04-88EB-625FBE52EBFB} + h;hh;hpp;hxx;hm;inl;inc;xsd + + + {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} + rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms + + + + + + + + + 头文件 + + + 头文件 + + + 头文件 + + + 头文件 + + + 头文件 + + + 头文件 + + + 头文件 + + + 头文件 + + + + + 源文件 + + + 源文件 + + + 源文件 + + + 源文件 + + + 源文件 + + + \ No newline at end of file diff --git a/ocr_test/ocr_test.vcxproj.user b/ocr_test/ocr_test.vcxproj.user new file mode 100644 index 0000000000000000000000000000000000000000..f28c3272a8d4adea33ca2a3e5325bce3c39b4449 --- /dev/null +++ b/ocr_test/ocr_test.vcxproj.user @@ -0,0 +1,7 @@ + + + + C:\plate_card_BLSTM\vs2013_caffe_BN_multi_label_kenel_w\model_platecar ..\model\test2.jpg + WindowsLocalDebugger + + \ No newline at end of file diff --git a/ocr_test/plateCard_test/densenet-bigger-5x5-no-lstm_iter_460000.caffemodel b/ocr_test/plateCard_test/densenet-bigger-5x5-no-lstm_iter_460000.caffemodel new file mode 100644 index 0000000000000000000000000000000000000000..6fb9e6c0950a22d61b44de43111e5e4586bced0a Binary files /dev/null and b/ocr_test/plateCard_test/densenet-bigger-5x5-no-lstm_iter_460000.caffemodel differ 
diff --git a/ocr_test/plateCard_test/densenet-bigger-5x5-no-lstm_iter_460000.solverstate b/ocr_test/plateCard_test/densenet-bigger-5x5-no-lstm_iter_460000.solverstate new file mode 100644 index 0000000000000000000000000000000000000000..ad8dcf3661552bc4056684a3d3d38e678928b1cb Binary files /dev/null and b/ocr_test/plateCard_test/densenet-bigger-5x5-no-lstm_iter_460000.solverstate differ diff --git a/ocr_test/plateCard_test/densenet-bigger-5x5-no-lstm_iter_660000.caffemodel b/ocr_test/plateCard_test/densenet-bigger-5x5-no-lstm_iter_660000.caffemodel new file mode 100644 index 0000000000000000000000000000000000000000..32dc35880502d2df77f246cef98fbb5e61ec69bd Binary files /dev/null and b/ocr_test/plateCard_test/densenet-bigger-5x5-no-lstm_iter_660000.caffemodel differ diff --git a/ocr_test/plateCard_test/densenet-bigger-5x5-no-lstm_iter_660000.solverstate b/ocr_test/plateCard_test/densenet-bigger-5x5-no-lstm_iter_660000.solverstate new file mode 100644 index 0000000000000000000000000000000000000000..eb735cc1835681c7eaede051259e792711cf56dd Binary files /dev/null and b/ocr_test/plateCard_test/densenet-bigger-5x5-no-lstm_iter_660000.solverstate differ diff --git a/ocr_test/plateCard_test/densenet-bigger-5x5-no-lstm_iter_720943.caffemodel b/ocr_test/plateCard_test/densenet-bigger-5x5-no-lstm_iter_720943.caffemodel new file mode 100644 index 0000000000000000000000000000000000000000..86368bfc3154ef27b2af42a259292321dd1d900c Binary files /dev/null and b/ocr_test/plateCard_test/densenet-bigger-5x5-no-lstm_iter_720943.caffemodel differ diff --git a/ocr_test/plateCard_test/densenet-bigger-5x5-no-lstm_iter_720943.solverstate b/ocr_test/plateCard_test/densenet-bigger-5x5-no-lstm_iter_720943.solverstate new file mode 100644 index 0000000000000000000000000000000000000000..bbe50537d3d7b1ebca1e567425abe5f713cbe4b9 Binary files /dev/null and b/ocr_test/plateCard_test/densenet-bigger-5x5-no-lstm_iter_720943.solverstate differ diff --git 
a/ocr_test/plateCard_test/densenet-sum-blstm-full-res-blstm_train-val.prototxt b/ocr_test/plateCard_test/densenet-sum-blstm-full-res-blstm_train-val.prototxt new file mode 100644 index 0000000000000000000000000000000000000000..139c08fa0c26a33bbdc1a3b8021b6a71eb868324 --- /dev/null +++ b/ocr_test/plateCard_test/densenet-sum-blstm-full-res-blstm_train-val.prototxt @@ -0,0 +1,574 @@ + + +layer { + name: "data" + type: "ImageData" + #type: "Data" + top: "data" + top: "label" + include { + phase: TRAIN + } + transform_param { + mean_value:152 + mean_value:152 + mean_value:152 + } + image_data_param { + #data_param { + source: "C:\\plate_card_BLSTM\\train_tools\\lstm_plate_char_label_train.txt" + new_height: 32 + new_width: 280 + is_color: 1 + #root_folder: "C:/plate_card_BLSTM/train_data/train/" + batch_size: 96 + #rand_skip: 1968 + shuffle: true + } +} + +layer { + name: "data" + type: "ImageData" + #type: "Data" + top: "data" + top: "label" + include { + phase: TEST + } + transform_param { + mean_value:152 + mean_value:152 + mean_value:152 + } + image_data_param { + #data_param { + source: "C:\\plate_card_BLSTM\\train_tools\\lstm_plate_char_label_test.txt" + new_height: 32 + new_width: 280 + is_color: 1 + #root_folder: "C:\\plate_card_BLSTM\\train_data\\train\\" + batch_size: 96 + shuffle: true + #rand_skip: 1968 + } +} + +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + param { + lr_mult: 1 + decay_mult: 1 + } + convolution_param { + num_output: 64 + kernel_size: 5 + pad: 2 + stride: 2 + weight_filler { type: "msra"} + bias_filler { type: "constant" value: 0 } + } +} + + +# DenseBlock 1 +layer { + name: "DenseBlock1" + type: "DenseBlock" + bottom: "conv1" + top: "DenseBlock1" + denseblock_param { + numTransition: 8 + initChannel: 64 + growthRate: 8 + Filter_Filler { + type: "msra" + } + BN_Scaler_Filler { + type: "constant" + value: 1 + } + BN_Bias_Filler { + type: "constant" + value: 0 + } + use_dropout: false + dropout_amount: 0.2 + } 
+} +layer { + name: "BatchNorm1" + type: "BatchNorm" + bottom: "DenseBlock1" + top: "BatchNorm1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } +} +layer { + name: "Scale1" + type: "Scale" + bottom: "BatchNorm1" + top: "BatchNorm1" + scale_param { + filler { + value: 1 + } + bias_term: true + bias_filler { + value: 0 + } + } +} +layer { + name: "ReLU1" + type: "ReLU" + bottom: "BatchNorm1" + top: "BatchNorm1" +} +layer { + name: "Convolution2" + type: "Convolution" + bottom: "BatchNorm1" + top: "Convolution2" + convolution_param { + num_output: 128 + bias_term: false + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "msra" + } + } +} +layer { + name: "Dropout1" + type: "Dropout" + bottom: "Convolution2" + top: "Dropout1" + dropout_param { + dropout_ratio: 0.2 + } +} +layer { + name: "Pooling1" + type: "Pooling" + bottom: "Dropout1" + top: "Pooling1" + pooling_param { + pool: AVE + kernel_size: 2 + stride: 2 + } +} + +# DenseBlock 2 +layer { + name: "DenseBlock2" + type: "DenseBlock" + bottom: "Pooling1" + top: "DenseBlock2" + denseblock_param { + numTransition: 8 + initChannel: 64 + growthRate: 8 + Filter_Filler { + type: "msra" + } + BN_Scaler_Filler { + type: "constant" + value: 1 + } + BN_Bias_Filler { + type: "constant" + value: 0 + } + use_dropout: false + dropout_amount: 0.2 + } +} +layer { + name: "BatchNorm2" + type: "BatchNorm" + bottom: "DenseBlock2" + top: "BatchNorm2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } +} +layer { + name: "Scale2" + type: "Scale" + bottom: "BatchNorm2" + top: "BatchNorm2" + scale_param { + filler { + value: 1 + } + bias_term: true + bias_filler { + value: 0 + } + } +} +layer { + name: "ReLU2" + type: "ReLU" + bottom: "BatchNorm2" + top: "BatchNorm2" +} + + + +layer { + name: "Convolution3" + type: "Convolution" + bottom: "BatchNorm2" + top: 
"Convolution3" + convolution_param { + num_output: 192 + bias_term: false + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "msra" + } + } +} +layer { + name: "Dropout2" + type: "Dropout" + bottom: "Convolution3" + top: "Convolution3" + dropout_param { + dropout_ratio: 0.2 + } +} +layer { + name: "Pooling2" + type: "Pooling" + bottom: "Convolution3" + top: "Pooling2" + pooling_param { + pool: AVE + kernel_size: 2 + stride: 2 + } +} + +# DenseBlock 3 +layer { + name: "DenseBlock3" + type: "DenseBlock" + bottom: "Pooling2" + top: "DenseBlock3" + denseblock_param { + numTransition: 8 + initChannel: 64 + growthRate: 8 + Filter_Filler { + type: "msra" + } + BN_Scaler_Filler { + type: "constant" + value: 1 + } + BN_Bias_Filler { + type: "constant" + value: 0 + } + use_dropout: false + dropout_amount: 0.2 + } +} +layer { + name: "BatchNorm3" + type: "BatchNorm" + bottom: "DenseBlock3" + top: "BatchNorm3" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } +} +layer { + name: "Scale3" + type: "Scale" + bottom: "BatchNorm3" + top: "BatchNorm3" + scale_param { + filler { + value: 1 + } + bias_term: true + bias_filler { + value: 0 + } + } +} +layer { + name: "ReLU3" + type: "ReLU" + bottom: "BatchNorm3" + top: "BatchNorm3" +} + + +layer { + name: "pool5_ave" + type: "Pooling" + bottom: "BatchNorm3" + top: "pool5_ave" + pooling_param { + pool: AVE + kernel_w: 1 + kernel_h: 4 + stride_w: 1 + stride_h: 1 + } +} + +layer { + name: "pool5_ave_transpose" + top: "pool5_ave_transpose" + bottom: "pool5_ave" + type: "Transpose" + transpose_param { + dim: 3 + dim: 2 + dim: 0 + dim: 1 + } +} + +layer { + name: "blstm_input" + type: "Reshape" + bottom: "pool5_ave_transpose" + top: "blstm_input" + reshape_param { + shape { dim: -1 } + axis: 1 + num_axes: 2 + } +} + +#===================blstm layer 1============================ +#======lstm1=================== +layer { + name: "lstm1" + type: "Lstm" + 
bottom: "blstm_input" + top: "lstm1" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} + +# =====lstm1_reverse=================== +layer { + name: "lstm1-reverse1" + type: "Reverse" + bottom: "blstm_input" + top: "rlstm1_input" + reverse_param { + axis: 0 + } +} +layer { + name: "rlstm1" + type: "Lstm" + bottom: "rlstm1_input" + top: "rlstm1-output" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "lstm1-reverse2" + type: "Reverse" + bottom: "rlstm1-output" + top: "rlstm1" + reverse_param { + axis: 0 + } +} + + +# merge lstm1 and rlstm1 +layer { + name: "blstm1" + type: "Eltwise" + bottom: "lstm1" + bottom: "rlstm1" + bottom: "blstm_input" + top: "blstm1" + eltwise_param { + operation: SUM + } +} + + + + +#===================blstm layer 2============================ +#======lstm2=================== +layer { + name: "lstm2" + type: "Lstm" + bottom: "blstm1" + top: "lstm2" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} + +# =====lstm2_reverse=================== +layer { + name: "lstm2-reverse1" + type: "Reverse" + bottom: "blstm1" + top: "rlstm2_input" + reverse_param { + axis: 0 + } +} + +layer { + name: "rlstm2" + type: "Lstm" + bottom: "rlstm2_input" + top: "rlstm2-output" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "lstm2-reverse2" + type: "Reverse" + bottom: "rlstm2-output" + top: "rlstm2" + reverse_param { + axis: 0 + } +} + +# merge lstm2 and rlstm2 +layer { + name: "blstm2" + type: "Eltwise" + bottom: "lstm2" + bottom: "rlstm2" + bottom: "blstm1" + bottom: "blstm_input" + top: "blstm2" + eltwise_param { + operation: SUM + } +} + +layer { + name: "fc1x" + type: "InnerProduct" + bottom: "blstm2" + 
top: "fc1x" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + axis: 2 + num_output: 69 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} + + +layer { + name: "ctcloss" + type: "WarpCTCLoss" + bottom: "fc1x" + bottom: "label" + top: "ctcloss" + loss_weight:1 +} + +layer { + name: "acc" + type: "CTCGreedyDecoder" + bottom: "fc1x" + bottom: "label" + top: "acc" + include { + phase: TEST + } +} \ No newline at end of file diff --git a/ocr_test/plateCard_test/deploy.dll b/ocr_test/plateCard_test/deploy.dll new file mode 100644 index 0000000000000000000000000000000000000000..9a90f1d2216b6cf8b8b5a78af1ee82068172ee75 --- /dev/null +++ b/ocr_test/plateCard_test/deploy.dll @@ -0,0 +1,514 @@ +name: "densenet" + +input: "data" +input_dim: 1 +input_dim: 3 +input_dim: 32 +input_dim: 280 + + +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + param { + lr_mult: 1 + decay_mult: 1 + } + convolution_param { + num_output: 64 + kernel_size: 5 + pad: 2 + stride: 2 + weight_filler { type: "msra"} + bias_filler { type: "constant" value: 0 } + } +} + + +# DenseBlock 1 +layer { + name: "DenseBlock1" + type: "DenseBlock" + bottom: "conv1" + top: "DenseBlock1" + denseblock_param { + numTransition: 8 + initChannel: 64 + growthRate: 8 + Filter_Filler { + type: "msra" + } + BN_Scaler_Filler { + type: "constant" + value: 1 + } + BN_Bias_Filler { + type: "constant" + value: 0 + } + use_dropout: false + dropout_amount: 0.2 + } +} +layer { + name: "BatchNorm1" + type: "BatchNorm" + bottom: "DenseBlock1" + top: "BatchNorm1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } +} +layer { + name: "Scale1" + type: "Scale" + bottom: "BatchNorm1" + top: "BatchNorm1" + scale_param { + filler { + value: 1 + } + bias_term: true + bias_filler { + value: 0 + } + } +} +layer { + name: "ReLU1" + type: 
"ReLU" + bottom: "BatchNorm1" + top: "BatchNorm1" +} +layer { + name: "Convolution2" + type: "Convolution" + bottom: "BatchNorm1" + top: "Convolution2" + convolution_param { + num_output: 128 + bias_term: false + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "msra" + } + } +} +layer { + name: "Dropout1" + type: "Dropout" + bottom: "Convolution2" + top: "Dropout1" + dropout_param { + dropout_ratio: 0.2 + } +} +layer { + name: "Pooling1" + type: "Pooling" + bottom: "Dropout1" + top: "Pooling1" + pooling_param { + pool: AVE + kernel_size: 2 + stride: 2 + } +} + +# DenseBlock 2 +layer { + name: "DenseBlock2" + type: "DenseBlock" + bottom: "Pooling1" + top: "DenseBlock2" + denseblock_param { + numTransition: 8 + initChannel: 64 + growthRate: 8 + Filter_Filler { + type: "msra" + } + BN_Scaler_Filler { + type: "constant" + value: 1 + } + BN_Bias_Filler { + type: "constant" + value: 0 + } + use_dropout: false + dropout_amount: 0.2 + } +} +layer { + name: "BatchNorm2" + type: "BatchNorm" + bottom: "DenseBlock2" + top: "BatchNorm2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } +} +layer { + name: "Scale2" + type: "Scale" + bottom: "BatchNorm2" + top: "BatchNorm2" + scale_param { + filler { + value: 1 + } + bias_term: true + bias_filler { + value: 0 + } + } +} +layer { + name: "ReLU2" + type: "ReLU" + bottom: "BatchNorm2" + top: "BatchNorm2" +} + + + +layer { + name: "Convolution3" + type: "Convolution" + bottom: "BatchNorm2" + top: "Convolution3" + convolution_param { + num_output: 192 + bias_term: false + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "msra" + } + } +} +layer { + name: "Dropout2" + type: "Dropout" + bottom: "Convolution3" + top: "Convolution3" + dropout_param { + dropout_ratio: 0.2 + } +} +layer { + name: "Pooling2" + type: "Pooling" + bottom: "Convolution3" + top: "Pooling2" + pooling_param { + pool: AVE + kernel_size: 2 + stride: 2 + } +} + +# 
DenseBlock 3 +layer { + name: "DenseBlock3" + type: "DenseBlock" + bottom: "Pooling2" + top: "DenseBlock3" + denseblock_param { + numTransition: 8 + initChannel: 64 + growthRate: 8 + Filter_Filler { + type: "msra" + } + BN_Scaler_Filler { + type: "constant" + value: 1 + } + BN_Bias_Filler { + type: "constant" + value: 0 + } + use_dropout: false + dropout_amount: 0.2 + } +} +layer { + name: "BatchNorm3" + type: "BatchNorm" + bottom: "DenseBlock3" + top: "BatchNorm3" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } +} +layer { + name: "Scale3" + type: "Scale" + bottom: "BatchNorm3" + top: "BatchNorm3" + scale_param { + filler { + value: 1 + } + bias_term: true + bias_filler { + value: 0 + } + } +} +layer { + name: "ReLU3" + type: "ReLU" + bottom: "BatchNorm3" + top: "BatchNorm3" +} + + +layer { + name: "pool5_ave" + type: "Pooling" + bottom: "BatchNorm3" + top: "pool5_ave" + pooling_param { + pool: AVE + kernel_w: 1 + kernel_h: 4 + stride_w: 1 + stride_h: 1 + } +} + +layer { + name: "pool5_ave_transpose" + top: "pool5_ave_transpose" + bottom: "pool5_ave" + type: "Transpose" + transpose_param { + dim: 3 + dim: 2 + dim: 0 + dim: 1 + } +} + +layer { + name: "blstm_input" + type: "Reshape" + bottom: "pool5_ave_transpose" + top: "blstm_input" + reshape_param { + shape { dim: -1 } + axis: 1 + num_axes: 2 + } +} + +#===================blstm layer 1============================ +#======lstm1=================== +layer { + name: "lstm1" + type: "Lstm" + bottom: "blstm_input" + top: "lstm1" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} + +# =====lstm1_reverse=================== +layer { + name: "lstm1-reverse1" + type: "Reverse" + bottom: "blstm_input" + top: "rlstm1_input" + reverse_param { + axis: 0 + } +} +layer { + name: "rlstm1" + type: "Lstm" + bottom: "rlstm1_input" + top: "rlstm1-output" + lstm_param { + 
num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "lstm1-reverse2" + type: "Reverse" + bottom: "rlstm1-output" + top: "rlstm1" + reverse_param { + axis: 0 + } +} + + +# merge lstm1 and rlstm1 +layer { + name: "blstm1" + type: "Eltwise" + bottom: "lstm1" + bottom: "rlstm1" + bottom: "blstm_input" + top: "blstm1" + eltwise_param { + operation: SUM + } +} + + + + +#===================blstm layer 2============================ +#======lstm2=================== +layer { + name: "lstm2" + type: "Lstm" + bottom: "blstm1" + top: "lstm2" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} + +# =====lstm2_reverse=================== +layer { + name: "lstm2-reverse1" + type: "Reverse" + bottom: "blstm1" + top: "rlstm2_input" + reverse_param { + axis: 0 + } +} + +layer { + name: "rlstm2" + type: "Lstm" + bottom: "rlstm2_input" + top: "rlstm2-output" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "lstm2-reverse2" + type: "Reverse" + bottom: "rlstm2-output" + top: "rlstm2" + reverse_param { + axis: 0 + } +} + +# merge lstm2 and rlstm2 +layer { + name: "blstm2" + type: "Eltwise" + bottom: "lstm2" + bottom: "rlstm2" + bottom: "blstm1" + bottom: "blstm_input" + top: "blstm2" + eltwise_param { + operation: SUM + } +} + +layer { + name: "fc1x" + type: "InnerProduct" + bottom: "blstm2" + top: "fc1x" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + axis: 2 + num_output: 69 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} + + +layer { + name: "result" + type: "CTCGreedyDecoder" + bottom: "fc1x" + top: "result" +} \ No newline at end of file diff --git a/ocr_test/plateCard_test/label.dll b/ocr_test/plateCard_test/label.dll new 
file mode 100644 index 0000000000000000000000000000000000000000..4020e2aa4b1022068ee32177fc2a935f48e6e7a9 --- /dev/null +++ b/ocr_test/plateCard_test/label.dll @@ -0,0 +1,69 @@ +blank +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +A +B +C +D +E +F +G +H +J +K +L +M +N +P +Q +R +S +T +U +V +W +X +Y +Z +ѧ + + + + + + + + + + + + + + + + + +³ + + + + + + + + + + + +ԥ + + + \ No newline at end of file diff --git a/ocr_test/plateCard_test/mean_values.txt b/ocr_test/plateCard_test/mean_values.txt new file mode 100644 index 0000000000000000000000000000000000000000..f2c94c6579fc063bea87dec00bad89680fcf6473 --- /dev/null +++ b/ocr_test/plateCard_test/mean_values.txt @@ -0,0 +1,3 @@ +152 +152 +152 \ No newline at end of file diff --git a/ocr_test/plateCard_test/model.dll b/ocr_test/plateCard_test/model.dll new file mode 100644 index 0000000000000000000000000000000000000000..86368bfc3154ef27b2af42a259292321dd1d900c Binary files /dev/null and b/ocr_test/plateCard_test/model.dll differ diff --git a/ocr_test/plateCard_test/model46.caffemodel b/ocr_test/plateCard_test/model46.caffemodel new file mode 100644 index 0000000000000000000000000000000000000000..6fb9e6c0950a22d61b44de43111e5e4586bced0a Binary files /dev/null and b/ocr_test/plateCard_test/model46.caffemodel differ diff --git "a/ocr_test/plateCard_test/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/densenet-bigger-5x5-no-lstm_iter_420000.caffemodel" "b/ocr_test/plateCard_test/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/densenet-bigger-5x5-no-lstm_iter_420000.caffemodel" new file mode 100644 index 0000000000000000000000000000000000000000..9c5b7c489ac1a12a8ebd58f1b5e32a5bbb51c9be Binary files /dev/null and "b/ocr_test/plateCard_test/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/densenet-bigger-5x5-no-lstm_iter_420000.caffemodel" differ diff --git "a/ocr_test/plateCard_test/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/densenet-bigger-5x5-no-lstm_iter_420000.solverstate" 
"b/ocr_test/plateCard_test/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/densenet-bigger-5x5-no-lstm_iter_420000.solverstate" new file mode 100644 index 0000000000000000000000000000000000000000..ba091b87a3cb482c12d9493e28e29be84ebc5d48 Binary files /dev/null and "b/ocr_test/plateCard_test/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/densenet-bigger-5x5-no-lstm_iter_420000.solverstate" differ diff --git "a/ocr_test/plateCard_test/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/densenet-bigger-5x5-no-lstm_iter_682957.solverstate" "b/ocr_test/plateCard_test/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/densenet-bigger-5x5-no-lstm_iter_682957.solverstate" new file mode 100644 index 0000000000000000000000000000000000000000..d014c8d451acb2ed938f48990c819ad719c1a7ad Binary files /dev/null and "b/ocr_test/plateCard_test/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/densenet-bigger-5x5-no-lstm_iter_682957.solverstate" differ diff --git "a/ocr_test/plateCard_test/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/densenet-sum-blstm-full-res-blstm_train-val.prototxt" "b/ocr_test/plateCard_test/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/densenet-sum-blstm-full-res-blstm_train-val.prototxt" new file mode 100644 index 0000000000000000000000000000000000000000..f1bb8eec26728bbc2c62e9c0d9030a86e2aac5a8 --- /dev/null +++ "b/ocr_test/plateCard_test/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/densenet-sum-blstm-full-res-blstm_train-val.prototxt" @@ -0,0 +1,574 @@ + + +layer { + name: "data" + type: "ImageData" + #type: "Data" + top: "data" + top: "label" + include { + phase: TRAIN + } + transform_param { + mean_value:152 + mean_value:152 + mean_value:152 + } + image_data_param { + #data_param { + source: "lstm_plate_char_label_train.txt" + new_height: 32 + new_width: 280 + is_color: 1 + root_folder: "C:\\plate_card_BLSTM\\train\\" + batch_size: 96 + #rand_skip: 1968 + 
shuffle: true + } +} + +layer { + name: "data" + type: "ImageData" + #type: "Data" + top: "data" + top: "label" + include { + phase: TEST + } + transform_param { + mean_value:152 + mean_value:152 + mean_value:152 + } + image_data_param { + #data_param { + source: "lstm_plate_char_label_test.txt" + new_height: 32 + new_width: 280 + is_color: 1 + root_folder: "C:\\plate_card_BLSTM\\train\\" + batch_size: 64 + shuffle: true + #rand_skip: 1968 + } +} + +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + param { + lr_mult: 1 + decay_mult: 1 + } + convolution_param { + num_output: 64 + kernel_size: 5 + pad: 2 + stride: 2 + weight_filler { type: "msra"} + bias_filler { type: "constant" value: 0 } + } +} + + +# DenseBlock 1 +layer { + name: "DenseBlock1" + type: "DenseBlock" + bottom: "conv1" + top: "DenseBlock1" + denseblock_param { + numTransition: 8 + initChannel: 64 + growthRate: 8 + Filter_Filler { + type: "msra" + } + BN_Scaler_Filler { + type: "constant" + value: 1 + } + BN_Bias_Filler { + type: "constant" + value: 0 + } + use_dropout: false + dropout_amount: 0.2 + } +} +layer { + name: "BatchNorm1" + type: "BatchNorm" + bottom: "DenseBlock1" + top: "BatchNorm1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } +} +layer { + name: "Scale1" + type: "Scale" + bottom: "BatchNorm1" + top: "BatchNorm1" + scale_param { + filler { + value: 1 + } + bias_term: true + bias_filler { + value: 0 + } + } +} +layer { + name: "ReLU1" + type: "ReLU" + bottom: "BatchNorm1" + top: "BatchNorm1" +} +layer { + name: "Convolution2" + type: "Convolution" + bottom: "BatchNorm1" + top: "Convolution2" + convolution_param { + num_output: 128 + bias_term: false + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "msra" + } + } +} +layer { + name: "Dropout1" + type: "Dropout" + bottom: "Convolution2" + top: "Dropout1" + dropout_param { + dropout_ratio: 0.2 + } +} +layer { + 
name: "Pooling1" + type: "Pooling" + bottom: "Dropout1" + top: "Pooling1" + pooling_param { + pool: AVE + kernel_size: 2 + stride: 2 + } +} + +# DenseBlock 2 +layer { + name: "DenseBlock2" + type: "DenseBlock" + bottom: "Pooling1" + top: "DenseBlock2" + denseblock_param { + numTransition: 8 + initChannel: 64 + growthRate: 8 + Filter_Filler { + type: "msra" + } + BN_Scaler_Filler { + type: "constant" + value: 1 + } + BN_Bias_Filler { + type: "constant" + value: 0 + } + use_dropout: false + dropout_amount: 0.2 + } +} +layer { + name: "BatchNorm2" + type: "BatchNorm" + bottom: "DenseBlock2" + top: "BatchNorm2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } +} +layer { + name: "Scale2" + type: "Scale" + bottom: "BatchNorm2" + top: "BatchNorm2" + scale_param { + filler { + value: 1 + } + bias_term: true + bias_filler { + value: 0 + } + } +} +layer { + name: "ReLU2" + type: "ReLU" + bottom: "BatchNorm2" + top: "BatchNorm2" +} + + + +layer { + name: "Convolution3" + type: "Convolution" + bottom: "BatchNorm2" + top: "Convolution3" + convolution_param { + num_output: 192 + bias_term: false + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "msra" + } + } +} +layer { + name: "Dropout2" + type: "Dropout" + bottom: "Convolution3" + top: "Convolution3" + dropout_param { + dropout_ratio: 0.2 + } +} +layer { + name: "Pooling2" + type: "Pooling" + bottom: "Convolution3" + top: "Pooling2" + pooling_param { + pool: AVE + kernel_size: 2 + stride: 2 + } +} + +# DenseBlock 3 +layer { + name: "DenseBlock3" + type: "DenseBlock" + bottom: "Pooling2" + top: "DenseBlock3" + denseblock_param { + numTransition: 8 + initChannel: 64 + growthRate: 8 + Filter_Filler { + type: "msra" + } + BN_Scaler_Filler { + type: "constant" + value: 1 + } + BN_Bias_Filler { + type: "constant" + value: 0 + } + use_dropout: false + dropout_amount: 0.2 + } +} +layer { + name: "BatchNorm3" + type: "BatchNorm" + bottom: 
"DenseBlock3" + top: "BatchNorm3" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } +} +layer { + name: "Scale3" + type: "Scale" + bottom: "BatchNorm3" + top: "BatchNorm3" + scale_param { + filler { + value: 1 + } + bias_term: true + bias_filler { + value: 0 + } + } +} +layer { + name: "ReLU3" + type: "ReLU" + bottom: "BatchNorm3" + top: "BatchNorm3" +} + + +layer { + name: "pool5_ave" + type: "Pooling" + bottom: "BatchNorm3" + top: "pool5_ave" + pooling_param { + pool: AVE + kernel_w: 1 + kernel_h: 4 + stride_w: 1 + stride_h: 1 + } +} + +layer { + name: "pool5_ave_transpose" + top: "pool5_ave_transpose" + bottom: "pool5_ave" + type: "Transpose" + transpose_param { + dim: 3 + dim: 2 + dim: 0 + dim: 1 + } +} + +layer { + name: "blstm_input" + type: "Reshape" + bottom: "pool5_ave_transpose" + top: "blstm_input" + reshape_param { + shape { dim: -1 } + axis: 1 + num_axes: 2 + } +} + +#===================blstm layer 1============================ +#======lstm1=================== +layer { + name: "lstm1" + type: "Lstm" + bottom: "blstm_input" + top: "lstm1" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} + +# =====lstm1_reverse=================== +layer { + name: "lstm1-reverse1" + type: "Reverse" + bottom: "blstm_input" + top: "rlstm1_input" + reverse_param { + axis: 0 + } +} +layer { + name: "rlstm1" + type: "Lstm" + bottom: "rlstm1_input" + top: "rlstm1-output" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "lstm1-reverse2" + type: "Reverse" + bottom: "rlstm1-output" + top: "rlstm1" + reverse_param { + axis: 0 + } +} + + +# merge lstm1 and rlstm1 +layer { + name: "blstm1" + type: "Eltwise" + bottom: "lstm1" + bottom: "rlstm1" + bottom: "blstm_input" + top: "blstm1" + eltwise_param { + operation: SUM + } +} + + 
+ + +#===================blstm layer 2============================ +#======lstm2=================== +layer { + name: "lstm2" + type: "Lstm" + bottom: "blstm1" + top: "lstm2" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} + +# =====lstm2_reverse=================== +layer { + name: "lstm2-reverse1" + type: "Reverse" + bottom: "blstm1" + top: "rlstm2_input" + reverse_param { + axis: 0 + } +} + +layer { + name: "rlstm2" + type: "Lstm" + bottom: "rlstm2_input" + top: "rlstm2-output" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "lstm2-reverse2" + type: "Reverse" + bottom: "rlstm2-output" + top: "rlstm2" + reverse_param { + axis: 0 + } +} + +# merge lstm2 and rlstm2 +layer { + name: "blstm2" + type: "Eltwise" + bottom: "lstm2" + bottom: "rlstm2" + bottom: "blstm1" + bottom: "blstm_input" + top: "blstm2" + eltwise_param { + operation: SUM + } +} + +layer { + name: "fc1x_69" + type: "InnerProduct" + bottom: "blstm2" + top: "fc1x" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + axis: 2 + num_output: 69 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} + + +layer { + name: "ctcloss" + type: "WarpCTCLoss" + bottom: "fc1x" + bottom: "label" + top: "ctcloss" + loss_weight:1 +} + +layer { + name: "acc" + type: "CTCGreedyDecoder" + bottom: "fc1x" + bottom: "label" + top: "acc" + include { + phase: TEST + } +} \ No newline at end of file diff --git "a/ocr_test/plateCard_test/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/deploy.prototxt" "b/ocr_test/plateCard_test/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/deploy.prototxt" new file mode 100644 index 0000000000000000000000000000000000000000..4c39f05f04b83bdbe8bd8d29d048b656ce982f85 --- /dev/null +++ 
"b/ocr_test/plateCard_test/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/deploy.prototxt" @@ -0,0 +1,514 @@ +name: "densenet" + +input: "data" +input_dim: 1 +input_dim: 3 +input_dim: 32 +input_dim: 280 + + +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + param { + lr_mult: 1 + decay_mult: 1 + } + convolution_param { + num_output: 64 + kernel_size: 5 + pad: 2 + stride: 2 + weight_filler { type: "msra"} + bias_filler { type: "constant" value: 0 } + } +} + + +# DenseBlock 1 +layer { + name: "DenseBlock1" + type: "DenseBlock" + bottom: "conv1" + top: "DenseBlock1" + denseblock_param { + numTransition: 8 + initChannel: 64 + growthRate: 8 + Filter_Filler { + type: "msra" + } + BN_Scaler_Filler { + type: "constant" + value: 1 + } + BN_Bias_Filler { + type: "constant" + value: 0 + } + use_dropout: false + dropout_amount: 0.2 + } +} +layer { + name: "BatchNorm1" + type: "BatchNorm" + bottom: "DenseBlock1" + top: "BatchNorm1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } +} +layer { + name: "Scale1" + type: "Scale" + bottom: "BatchNorm1" + top: "BatchNorm1" + scale_param { + filler { + value: 1 + } + bias_term: true + bias_filler { + value: 0 + } + } +} +layer { + name: "ReLU1" + type: "ReLU" + bottom: "BatchNorm1" + top: "BatchNorm1" +} +layer { + name: "Convolution2" + type: "Convolution" + bottom: "BatchNorm1" + top: "Convolution2" + convolution_param { + num_output: 128 + bias_term: false + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "msra" + } + } +} +layer { + name: "Dropout1" + type: "Dropout" + bottom: "Convolution2" + top: "Dropout1" + dropout_param { + dropout_ratio: 0.2 + } +} +layer { + name: "Pooling1" + type: "Pooling" + bottom: "Dropout1" + top: "Pooling1" + pooling_param { + pool: AVE + kernel_size: 2 + stride: 2 + } +} + +# DenseBlock 2 +layer { + name: "DenseBlock2" + type: "DenseBlock" + bottom: "Pooling1" + 
top: "DenseBlock2" + denseblock_param { + numTransition: 8 + initChannel: 64 + growthRate: 8 + Filter_Filler { + type: "msra" + } + BN_Scaler_Filler { + type: "constant" + value: 1 + } + BN_Bias_Filler { + type: "constant" + value: 0 + } + use_dropout: false + dropout_amount: 0.2 + } +} +layer { + name: "BatchNorm2" + type: "BatchNorm" + bottom: "DenseBlock2" + top: "BatchNorm2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } +} +layer { + name: "Scale2" + type: "Scale" + bottom: "BatchNorm2" + top: "BatchNorm2" + scale_param { + filler { + value: 1 + } + bias_term: true + bias_filler { + value: 0 + } + } +} +layer { + name: "ReLU2" + type: "ReLU" + bottom: "BatchNorm2" + top: "BatchNorm2" +} + + + +layer { + name: "Convolution3" + type: "Convolution" + bottom: "BatchNorm2" + top: "Convolution3" + convolution_param { + num_output: 192 + bias_term: false + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "msra" + } + } +} +layer { + name: "Dropout2" + type: "Dropout" + bottom: "Convolution3" + top: "Convolution3" + dropout_param { + dropout_ratio: 0.2 + } +} +layer { + name: "Pooling2" + type: "Pooling" + bottom: "Convolution3" + top: "Pooling2" + pooling_param { + pool: AVE + kernel_size: 2 + stride: 2 + } +} + +# DenseBlock 3 +layer { + name: "DenseBlock3" + type: "DenseBlock" + bottom: "Pooling2" + top: "DenseBlock3" + denseblock_param { + numTransition: 8 + initChannel: 64 + growthRate: 8 + Filter_Filler { + type: "msra" + } + BN_Scaler_Filler { + type: "constant" + value: 1 + } + BN_Bias_Filler { + type: "constant" + value: 0 + } + use_dropout: false + dropout_amount: 0.2 + } +} +layer { + name: "BatchNorm3" + type: "BatchNorm" + bottom: "DenseBlock3" + top: "BatchNorm3" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } +} +layer { + name: "Scale3" + type: "Scale" + bottom: "BatchNorm3" + 
top: "BatchNorm3" + scale_param { + filler { + value: 1 + } + bias_term: true + bias_filler { + value: 0 + } + } +} +layer { + name: "ReLU3" + type: "ReLU" + bottom: "BatchNorm3" + top: "BatchNorm3" +} + + +layer { + name: "pool5_ave" + type: "Pooling" + bottom: "BatchNorm3" + top: "pool5_ave" + pooling_param { + pool: AVE + kernel_w: 1 + kernel_h: 4 + stride_w: 1 + stride_h: 1 + } +} + +layer { + name: "pool5_ave_transpose" + top: "pool5_ave_transpose" + bottom: "pool5_ave" + type: "Transpose" + transpose_param { + dim: 3 + dim: 2 + dim: 0 + dim: 1 + } +} + +layer { + name: "blstm_input" + type: "Reshape" + bottom: "pool5_ave_transpose" + top: "blstm_input" + reshape_param { + shape { dim: -1 } + axis: 1 + num_axes: 2 + } +} + +#===================blstm layer 1============================ +#======lstm1=================== +layer { + name: "lstm1" + type: "Lstm" + bottom: "blstm_input" + top: "lstm1" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} + +# =====lstm1_reverse=================== +layer { + name: "lstm1-reverse1" + type: "Reverse" + bottom: "blstm_input" + top: "rlstm1_input" + reverse_param { + axis: 0 + } +} +layer { + name: "rlstm1" + type: "Lstm" + bottom: "rlstm1_input" + top: "rlstm1-output" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "lstm1-reverse2" + type: "Reverse" + bottom: "rlstm1-output" + top: "rlstm1" + reverse_param { + axis: 0 + } +} + + +# merge lstm1 and rlstm1 +layer { + name: "blstm1" + type: "Eltwise" + bottom: "lstm1" + bottom: "rlstm1" + bottom: "blstm_input" + top: "blstm1" + eltwise_param { + operation: SUM + } +} + + + + +#===================blstm layer 2============================ +#======lstm2=================== +layer { + name: "lstm2" + type: "Lstm" + bottom: "blstm1" + top: "lstm2" + lstm_param { + num_output: 256 + weight_filler { + type: 
"gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} + +# =====lstm2_reverse=================== +layer { + name: "lstm2-reverse1" + type: "Reverse" + bottom: "blstm1" + top: "rlstm2_input" + reverse_param { + axis: 0 + } +} + +layer { + name: "rlstm2" + type: "Lstm" + bottom: "rlstm2_input" + top: "rlstm2-output" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "lstm2-reverse2" + type: "Reverse" + bottom: "rlstm2-output" + top: "rlstm2" + reverse_param { + axis: 0 + } +} + +# merge lstm2 and rlstm2 +layer { + name: "blstm2" + type: "Eltwise" + bottom: "lstm2" + bottom: "rlstm2" + bottom: "blstm1" + bottom: "blstm_input" + top: "blstm2" + eltwise_param { + operation: SUM + } +} + +layer { + name: "fc1x_69" + type: "InnerProduct" + bottom: "blstm2" + top: "fc1x" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + axis: 2 + num_output: 69 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} + + +layer { + name: "result" + type: "CTCGreedyDecoder" + bottom: "fc1x" + top: "result" +} \ No newline at end of file diff --git "a/ocr_test/plateCard_test/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/model.caffemodel" "b/ocr_test/plateCard_test/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/model.caffemodel" new file mode 100644 index 0000000000000000000000000000000000000000..3e54f97982efe3b56f42a1cec0f7bd86df3f228d Binary files /dev/null and "b/ocr_test/plateCard_test/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/model.caffemodel" differ diff --git a/ocr_test/public.h b/ocr_test/public.h new file mode 100644 index 0000000000000000000000000000000000000000..6a5bfcf0594d90a504435d86dc5abb9a824d6303 --- /dev/null +++ b/ocr_test/public.h @@ -0,0 +1,59 @@ +#pragma once + + 
+//------------------------------------------------------------------------------------------------- +#ifdef WIN32 + #ifndef IMPORT + #define IMPORT __declspec(dllimport) + #endif + #ifndef EXPORT + #define EXPORT __declspec(dllexport) + #endif + #ifndef API + #define API __stdcall + #endif +#else + #ifndef IMPORT + #define IMPORT + #endif + #ifndef EXPORT + #define EXPORT + #endif + #ifndef API + #define API + #endif +#endif + +//------------------------------------------------------------------------------------------------- +#ifndef interface +#define CINTERFACE +#define interface struct +#endif + +//------------------------------------------------------------------------------------------------- +#ifndef min2 +#define min2(a,b) (((a)<(b)) ? (a) : (b)) +#endif + +//------------------------------------------------------------------------------------------------- +#ifndef max2 +#define max2(a,b) (((a)>(b)) ? (a) : (b)) +#endif + +//------------------------------------------------------------------------------------------------- +#ifndef _rect_ +#define _rect_ +struct rect{ int x0, y0, x1, y1; }; +#define rectw(r) (r.x1-r.x0+1) +#define recth(r) (r.y1-r.y0+1) +#endif + +//------------------------------------------------------------------------------------------------- +#ifndef byte +typedef unsigned char byte; +#endif + +//------------------------------------------------------------------------------------------------- +#ifndef word +typedef unsigned short word; +#endif diff --git a/ocr_test/rec_test/cascade12.xml b/ocr_test/rec_test/cascade12.xml new file mode 100644 index 0000000000000000000000000000000000000000..607bd894d731d3ba2569a1413310ae0a13f0572b --- /dev/null +++ b/ocr_test/rec_test/cascade12.xml @@ -0,0 +1,10408 @@ + + + + BOOST + HAAR + 25 + 15 + + GAB + 9.9500000476837158e-001 + 3.0000001192092896e-001 + 9.4999999999999996e-001 + 1 + 300 + + 0 + 1 + ALL + 12 + + + <_> + 11 + -1.6251399517059326e+000 + + <_> + + 0 -1 694 -7.2086244821548462e-002 + + 
6.8792390823364258e-001 -8.1407219171524048e-001 + <_> + + 0 -1 138 7.6971486210823059e-002 + + -4.6762531995773315e-001 7.1302038431167603e-001 + <_> + + 0 -1 388 5.7955548167228699e-002 + + -2.7737632393836975e-001 8.8490480184555054e-001 + <_> + + 0 -1 313 -1.7470759153366089e-001 + + 7.7989900112152100e-001 -2.0272587239742279e-001 + <_> + + 0 -1 570 -3.0315775424242020e-002 + + 7.3579686880111694e-001 -2.9271504282951355e-001 + <_> + + 0 -1 466 -2.2699251770973206e-001 + + -8.7592130899429321e-001 2.6184245944023132e-001 + <_> + + 0 -1 472 2.2222604602575302e-002 + + -2.3584529757499695e-001 5.2793049812316895e-001 + <_> + + 0 -1 238 -1.5160607174038887e-002 + + 6.6588300466537476e-001 -2.1023984253406525e-001 + <_> + + 0 -1 293 2.2018256783485413e-001 + + -1.4480605721473694e-001 7.7717304229736328e-001 + <_> + + 0 -1 774 4.9290269613265991e-002 + + 1.8850675225257874e-001 -7.3545551300048828e-001 + <_> + + 0 -1 610 -5.6491583585739136e-002 + + 5.9895467758178711e-001 -1.9187590479850769e-001 + + <_> + 21 + -1.9631267786026001e+000 + + <_> + + 0 -1 272 4.8581756651401520e-002 + + -8.0708128213882446e-001 3.7584605813026428e-001 + <_> + + 0 -1 400 -2.9397435486316681e-002 + + 6.1153280735015869e-001 -4.8089566826820374e-001 + <_> + + 0 -1 314 -3.1789845228195190e-001 + + 7.8902244567871094e-001 -2.0238314568996429e-001 + <_> + + 0 -1 674 6.5800379961729050e-003 + + -1.9626371562480927e-001 6.5736478567123413e-001 + <_> + + 0 -1 475 -1.6969431191682816e-002 + + 6.9301313161849976e-001 -1.1920227110385895e-001 + <_> + + 0 -1 84 4.4716805219650269e-002 + + -1.9045864045619965e-001 5.1003587245941162e-001 + <_> + + 0 -1 190 1.0721042752265930e-002 + + -1.9191724061965942e-001 5.1996380090713501e-001 + <_> + + 0 -1 238 1.9956806674599648e-002 + + -2.1976803243160248e-001 5.0571066141128540e-001 + <_> + + 0 -1 363 -6.1682559549808502e-002 + + 8.0145609378814697e-001 -1.0254951566457748e-001 + <_> + + 0 -1 289 -2.4292188882827759e-001 + + 6.9960737228393555e-001 
-1.0233960300683975e-001 + <_> + + 0 -1 371 1.0334219038486481e-001 + + -1.6020181775093079e-001 4.8573195934295654e-001 + <_> + + 0 -1 200 1.1884644627571106e-002 + + 1.7845858633518219e-001 -6.3060945272445679e-001 + <_> + + 0 -1 344 -4.9679703079164028e-003 + + -6.3906025886535645e-001 1.2072710692882538e-001 + <_> + + 0 -1 800 -6.0332454741001129e-003 + + -6.1882525682449341e-001 1.1507444083690643e-001 + <_> + + 0 -1 128 1.1873580515384674e-002 + + 1.0300634056329727e-001 -5.8880245685577393e-001 + <_> + + 0 -1 134 -8.1199277192354202e-003 + + -6.6079556941986084e-001 8.4339156746864319e-002 + <_> + + 0 -1 145 -5.2882306277751923e-002 + + 5.0660252571105957e-001 -1.2996160984039307e-001 + <_> + + 0 -1 649 -2.3115944117307663e-002 + + 4.4238254427909851e-001 -1.7129151523113251e-001 + <_> + + 0 -1 672 2.0139049738645554e-002 + + -1.0410805791616440e-001 5.5971127748489380e-001 + <_> + + 0 -1 799 4.3108966201543808e-003 + + 1.0123755782842636e-001 -6.2685883045196533e-001 + <_> + + 0 -1 243 1.4916403219103813e-002 + + -1.3348171114921570e-001 4.8477530479431152e-001 + + <_> + 30 + -1.9130507707595825e+000 + + <_> + + 0 -1 311 -3.4675604104995728e-001 + + 6.7411494255065918e-001 -6.3702303171157837e-001 + <_> + + 0 -1 210 2.3643326014280319e-002 + + -4.6200242638587952e-001 5.0392824411392212e-001 + <_> + + 0 -1 38 -2.8627879917621613e-002 + + 4.3452042341232300e-001 -3.0821722745895386e-001 + <_> + + 0 -1 245 -1.6323434188961983e-002 + + 5.6291866302490234e-001 -1.7400690913200378e-001 + <_> + + 0 -1 722 -2.9674716293811798e-002 + + 4.5463520288467407e-001 -2.0798887312412262e-001 + <_> + + 0 -1 752 -5.2698314189910889e-002 + + 6.6093504428863525e-001 -1.5003877878189087e-001 + <_> + + 0 -1 297 -2.0552046597003937e-002 + + 5.9406924247741699e-001 -1.4702606201171875e-001 + <_> + + 0 -1 407 1.9042756175622344e-003 + + 1.5562801063060760e-001 -5.8934229612350464e-001 + <_> + + 0 -1 343 -2.9612448997795582e-003 + + -5.5747258663177490e-001 1.5942691266536713e-001 + 
<_> + + 0 -1 807 -2.4060916155576706e-002 + + 6.5585446357727051e-001 -1.4757204055786133e-001 + <_> + + 0 -1 11 5.3670585155487061e-002 + + 1.5007233619689941e-001 -6.8282759189605713e-001 + <_> + + 0 -1 600 -2.2803394123911858e-002 + + 4.3183639645576477e-001 -1.9339442253112793e-001 + <_> + + 0 -1 524 -1.9103053957223892e-002 + + 5.8731901645660400e-001 -1.0027322173118591e-001 + <_> + + 0 -1 487 -4.9306247383356094e-002 + + 5.0913065671920776e-001 -1.1362070590257645e-001 + <_> + + 0 -1 165 -4.3998216278851032e-003 + + -6.6341334581375122e-001 1.0152698308229446e-001 + <_> + + 0 -1 149 9.1867521405220032e-003 + + 6.5340906381607056e-002 -5.8095878362655640e-001 + <_> + + 0 -1 468 -3.7478968501091003e-002 + + 6.9033890962600708e-001 -7.8115984797477722e-002 + <_> + + 0 -1 170 7.4854664504528046e-002 + + -1.2302782386541367e-001 4.0823748707771301e-001 + <_> + + 0 -1 273 -4.3377112597227097e-002 + + 4.3846896290779114e-001 -1.0812521725893021e-001 + <_> + + 0 -1 6 -3.2526290416717529e-001 + + 3.9165654778480530e-001 -1.1521677672863007e-001 + <_> + + 0 -1 5 -3.1655758619308472e-002 + + -5.4603415727615356e-001 1.0829363763332367e-001 + <_> + + 0 -1 741 -1.5229771379381418e-003 + + -5.3613418340682983e-001 6.6289119422435760e-002 + <_> + + 0 -1 38 4.4522762298583984e-002 + + -7.6075531542301178e-002 6.2845832109451294e-001 + <_> + + 0 -1 582 2.6979606598615646e-002 + + -1.5466867387294769e-001 3.0504372715950012e-001 + <_> + + 0 -1 626 -2.0323577523231506e-001 + + 4.7938537597656250e-001 -9.2802703380584717e-002 + <_> + + 0 -1 50 3.1132256984710693e-001 + + -2.0901152491569519e-001 2.2240902483463287e-001 + <_> + + 0 -1 392 -1.2886178493499756e-001 + + 5.1386284828186035e-001 -8.9367248117923737e-002 + <_> + + 0 -1 526 -4.5659184455871582e-002 + + 3.5739445686340332e-001 -1.2018067389726639e-001 + <_> + + 0 -1 280 1.6725984215736389e-001 + + 6.4147800207138062e-002 -7.2877216339111328e-001 + <_> + + 0 -1 99 1.7514480277895927e-002 + + -1.0654997825622559e-001 
3.8574376702308655e-001 + + <_> + 55 + -2.0173692703247070e+000 + + <_> + + 0 -1 588 1.1217807233333588e-001 + + -6.0642522573471069e-001 6.4563655853271484e-001 + <_> + + 0 -1 693 -9.3424037098884583e-002 + + 4.0366497635841370e-001 -4.2621350288391113e-001 + <_> + + 0 -1 163 1.6624230891466141e-002 + + -3.2970666885375977e-001 3.1330439448356628e-001 + <_> + + 0 -1 479 -1.6644325107336044e-002 + + 6.2518942356109619e-001 -1.0661130398511887e-001 + <_> + + 0 -1 24 -2.8568893671035767e-001 + + 7.7842056751251221e-001 -6.2610425055027008e-002 + <_> + + 0 -1 116 3.5677129030227661e-001 + + -1.0335481911897659e-001 5.8461785316467285e-001 + <_> + + 0 -1 583 -6.7239105701446533e-002 + + 5.6404709815979004e-001 -1.4021076261997223e-001 + <_> + + 0 -1 785 -5.7345330715179443e-003 + + -4.9381658434867859e-001 1.4339385926723480e-001 + <_> + + 0 -1 528 9.7006373107433319e-003 + + 8.0212749540805817e-002 -6.1218249797821045e-001 + <_> + + 0 -1 603 -7.8137414529919624e-003 + + -6.1733752489089966e-001 9.8044969141483307e-002 + <_> + + 0 -1 495 7.6342090964317322e-002 + + -8.8390685617923737e-002 6.0566323995590210e-001 + <_> + + 0 -1 202 2.1014085505157709e-003 + + 8.9743942022323608e-002 -5.2556556463241577e-001 + <_> + + 0 -1 133 -7.8479237854480743e-003 + + -6.7503917217254639e-001 7.9681910574436188e-002 + <_> + + 0 -1 410 1.3807678595185280e-002 + + -1.3720980286598206e-001 4.1028419137001038e-001 + <_> + + 0 -1 133 7.5135175138711929e-003 + + 1.1252338439226151e-001 -6.4657258987426758e-001 + <_> + + 0 -1 264 -2.8182831592857838e-003 + + -5.5316627025604248e-001 5.9461358934640884e-002 + <_> + + 0 -1 223 -1.5580770373344421e-001 + + 3.6509868502616882e-001 -1.3882166147232056e-001 + <_> + + 0 -1 434 -1.3790637254714966e-001 + + 4.8499700427055359e-001 -8.0953881144523621e-002 + <_> + + 0 -1 639 1.4884536154568195e-002 + + -8.0786310136318207e-002 5.3827059268951416e-001 + <_> + + 0 -1 599 -9.0376380831003189e-003 + + 3.5563880205154419e-001 -1.0968518257141113e-001 + 
<_> + + 0 -1 651 1.8296413123607635e-002 + + -7.5912080705165863e-002 6.0991287231445313e-001 + <_> + + 0 -1 8 2.6432290673255920e-002 + + -1.7105233669281006e-001 2.7077639102935791e-001 + <_> + + 0 -1 242 -1.6507744789123535e-001 + + 4.7792035341262817e-001 -9.4892151653766632e-002 + <_> + + 0 -1 455 -1.7269724979996681e-002 + + 3.8151738047599792e-001 -1.0983025282621384e-001 + <_> + + 0 -1 690 1.1594760231673717e-002 + + -1.1129014939069748e-001 3.3691766858100891e-001 + <_> + + 0 -1 808 -1.3633415102958679e-002 + + -6.0082298517227173e-001 6.7737087607383728e-002 + <_> + + 0 -1 34 -1.7866048216819763e-001 + + -7.6020133495330811e-001 4.1513070464134216e-002 + <_> + + 0 -1 337 1.1733185499906540e-002 + + -1.0908238589763641e-001 3.4221148490905762e-001 + <_> + + 0 -1 806 -4.8016691580414772e-003 + + -7.2473436594009399e-001 5.1566515117883682e-002 + <_> + + 0 -1 75 -7.5007192790508270e-003 + + 2.5185352563858032e-001 -1.4202110469341278e-001 + <_> + + 0 -1 590 -2.2442482411861420e-002 + + 2.4061225354671478e-001 -1.4089570939540863e-001 + <_> + + 0 -1 351 -5.9806056320667267e-002 + + 3.6730846762657166e-001 -9.0579837560653687e-002 + <_> + + 0 -1 192 2.9117142781615257e-002 + + 4.2693700641393661e-002 -7.1741634607315063e-001 + <_> + + 0 -1 158 -5.6111458688974380e-002 + + -7.1160662174224854e-001 3.5722028464078903e-002 + <_> + + 0 -1 270 -1.0586555302143097e-001 + + -7.4173212051391602e-001 3.3193625509738922e-002 + <_> + + 0 -1 647 6.9847996346652508e-003 + + 3.0989645048975945e-002 -7.7913534641265869e-001 + <_> + + 0 -1 726 3.0043780803680420e-002 + + 2.5739155709743500e-002 -8.5128879547119141e-001 + <_> + + 0 -1 509 -5.5537477135658264e-002 + + 3.2640528678894043e-001 -9.0293265879154205e-002 + <_> + + 0 -1 560 -1.1899802833795547e-001 + + 3.7325438857078552e-001 -7.3934875428676605e-002 + <_> + + 0 -1 137 2.3963520303368568e-002 + + -1.0457414388656616e-001 3.0054581165313721e-001 + <_> + + 0 -1 718 -1.7708687111735344e-002 + + 2.8643918037414551e-001 
-1.0045628994703293e-001 + <_> + + 0 -1 597 9.2176152393221855e-003 + + -1.0655273497104645e-001 2.9579246044158936e-001 + <_> + + 0 -1 333 7.6030939817428589e-003 + + -1.2088721245527267e-001 2.5691181421279907e-001 + <_> + + 0 -1 782 4.0463847108185291e-003 + + -1.3183876872062683e-001 2.1324723958969116e-001 + <_> + + 0 -1 63 -5.7698391377925873e-002 + + -5.7107716798782349e-001 5.3185448050498962e-002 + <_> + + 0 -1 534 1.8702112138271332e-002 + + -1.5002527832984924e-001 1.9817110896110535e-001 + <_> + + 0 -1 355 -3.7422217428684235e-002 + + 3.6545428633689880e-001 -8.2261450588703156e-002 + <_> + + 0 -1 160 4.0244441479444504e-003 + + 6.2232196331024170e-002 -5.2842968702316284e-001 + <_> + + 0 -1 342 -1.9989470019936562e-003 + + -5.4141521453857422e-001 4.6907581388950348e-002 + <_> + + 0 -1 736 -1.3096828013658524e-002 + + 3.2414883375167847e-001 -8.2211688160896301e-002 + <_> + + 0 -1 619 -7.7576912939548492e-002 + + 4.5636558532714844e-001 -5.2367985248565674e-002 + <_> + + 0 -1 636 -5.8553437702357769e-003 + + 2.6129749417304993e-001 -9.4695106148719788e-002 + <_> + + 0 -1 448 5.0675973296165466e-002 + + -9.3877971172332764e-002 2.6746714115142822e-001 + <_> + + 0 -1 435 1.1818362772464752e-001 + + 4.5935548841953278e-002 -6.3326281309127808e-001 + <_> + + 0 -1 22 3.8446130929514766e-004 + + -1.5554963052272797e-001 1.7050321400165558e-001 + + <_> + 67 + -1.9485298395156860e+000 + + <_> + + 0 -1 586 -6.6238045692443848e-002 + + 6.7176431417465210e-001 -5.9806954860687256e-001 + <_> + + 0 -1 24 2.7982944250106812e-001 + + -3.3744829893112183e-001 5.2063155174255371e-001 + <_> + + 0 -1 167 4.5085638761520386e-002 + + -2.3081542551517487e-001 4.3220773339271545e-001 + <_> + + 0 -1 684 -1.8921539187431335e-002 + + 4.8016011714935303e-001 -1.5273953974246979e-001 + <_> + + 0 -1 705 1.8327537924051285e-002 + + -1.3539023697376251e-001 5.5843317508697510e-001 + <_> + + 0 -1 135 -9.7133498638868332e-003 + + -4.9484416842460632e-001 1.3455811142921448e-001 + <_> 
+ + 0 -1 127 1.1846482753753662e-002 + + 1.2395086139440536e-001 -5.3306931257247925e-001 + <_> + + 0 -1 8 -2.2439997643232346e-002 + + 4.4585660099983215e-001 -1.2993499636650085e-001 + <_> + + 0 -1 707 9.7985491156578064e-003 + + -1.1911695450544357e-001 4.7210860252380371e-001 + <_> + + 0 -1 119 5.9385155327618122e-003 + + 8.7143994867801666e-002 -5.9211844205856323e-001 + <_> + + 0 -1 543 6.6975675523281097e-002 + + -1.2327117472887039e-001 4.0334329009056091e-001 + <_> + + 0 -1 624 4.7176703810691833e-002 + + -1.5997993946075439e-001 3.2146406173706055e-001 + <_> + + 0 -1 199 8.3185918629169464e-003 + + 9.5581896603107452e-002 -5.3733414411544800e-001 + <_> + + 0 -1 783 -1.0797622613608837e-002 + + -5.7104659080505371e-001 6.6143624484539032e-002 + <_> + + 0 -1 261 -9.6789980307221413e-003 + + -5.7453703880310059e-001 7.4281655251979828e-002 + <_> + + 0 -1 110 1.3172220438718796e-002 + + 5.1845859736204147e-002 -6.9281905889511108e-001 + <_> + + 0 -1 250 -1.5867080539464951e-002 + + 4.4661539793014526e-001 -1.0089319199323654e-001 + <_> + + 0 -1 398 -1.5642736107110977e-002 + + 4.1672131419181824e-001 -1.1661354452371597e-001 + <_> + + 0 -1 10 1.1639080941677094e-001 + + 5.3063102066516876e-002 -7.6825660467147827e-001 + <_> + + 0 -1 303 -3.8272470235824585e-001 + + -7.3895919322967529e-001 3.7785880267620087e-002 + <_> + + 0 -1 714 -9.4910524785518646e-003 + + -4.7855091094970703e-001 6.8649142980575562e-002 + <_> + + 0 -1 745 1.6879716422408819e-003 + + -1.8806865811347961e-001 2.2522130608558655e-001 + <_> + + 0 -1 687 -1.1047841981053352e-002 + + 3.1802293658256531e-001 -1.2586399912834167e-001 + <_> + + 0 -1 790 7.2766491211950779e-003 + + 4.3859146535396576e-002 -7.5240099430084229e-001 + <_> + + 0 -1 329 3.7978945765644312e-003 + + -1.4030785858631134e-001 2.5546115636825562e-001 + <_> + + 0 -1 717 1.4771098271012306e-002 + + -8.6563937366008759e-002 3.8456541299819946e-001 + <_> + + 0 -1 97 8.0950036644935608e-002 + + 5.8105770498514175e-002 
-6.0112601518630981e-001 + <_> + + 0 -1 267 -8.9899823069572449e-003 + + -5.3358817100524902e-001 5.4485905915498734e-002 + <_> + + 0 -1 187 -6.9201096892356873e-002 + + -4.9618330597877502e-001 5.5555611848831177e-002 + <_> + + 0 -1 665 -2.2665487602353096e-002 + + -5.8978593349456787e-001 4.3833933770656586e-002 + <_> + + 0 -1 330 -1.0921448469161987e-002 + + 3.2260680198669434e-001 -9.5255509018898010e-002 + <_> + + 0 -1 616 -2.8704827651381493e-002 + + 3.4510585665702820e-001 -8.5260458290576935e-002 + <_> + + 0 -1 823 -5.0290287472307682e-003 + + -5.8223444223403931e-001 4.9559768289327621e-002 + <_> + + 0 -1 406 -4.6249264851212502e-003 + + -6.3765782117843628e-001 3.7925668060779572e-002 + <_> + + 0 -1 156 -7.1847680956125259e-003 + + -5.0609725713729858e-001 4.9329411238431931e-002 + <_> + + 0 -1 353 -5.5645150132477283e-003 + + 2.0351761579513550e-001 -1.2736704945564270e-001 + <_> + + 0 -1 683 -1.6589738428592682e-002 + + 4.4935634732246399e-001 -7.3382675647735596e-002 + <_> + + 0 -1 17 -5.6185409426689148e-002 + + -6.2861549854278564e-001 4.9926567822694778e-002 + <_> + + 0 -1 52 -1.7093941569328308e-002 + + 2.5514549016952515e-001 -1.1216693371534348e-001 + <_> + + 0 -1 446 4.3069213628768921e-002 + + -8.0584414303302765e-002 3.4598538279533386e-001 + <_> + + 0 -1 389 -6.2493188306689262e-003 + + 2.2559496760368347e-001 -1.4636033773422241e-001 + <_> + + 0 -1 594 4.1493251919746399e-003 + + -9.3292273581027985e-002 3.1113749742507935e-001 + <_> + + 0 -1 643 -1.4010149985551834e-002 + + 2.2401230037212372e-001 -1.3207712769508362e-001 + <_> + + 0 -1 706 -8.5290838032960892e-003 + + 2.9243519902229309e-001 -9.8602615296840668e-002 + <_> + + 0 -1 393 -2.1426618099212646e-002 + + 4.5886808633804321e-001 -5.9665817767381668e-002 + <_> + + 0 -1 55 -5.8664698153734207e-003 + + -4.8501238226890564e-001 6.1560060828924179e-002 + <_> + + 0 -1 439 7.7467754483222961e-002 + + 4.9901798367500305e-002 -5.1934790611267090e-001 + <_> + + 0 -1 804 
5.2905576303601265e-003 + + 3.2784264534711838e-002 -6.5088880062103271e-001 + <_> + + 0 -1 396 -1.3058319687843323e-002 + + 2.9557755589485168e-001 -8.8856421411037445e-002 + <_> + + 0 -1 368 -7.7718310058116913e-002 + + 3.0143418908119202e-001 -8.4007382392883301e-002 + <_> + + 0 -1 480 1.0327575728297234e-002 + + -1.2959773838520050e-001 2.9451465606689453e-001 + <_> + + 0 -1 37 3.6722950637340546e-002 + + -6.8360336124897003e-002 3.7415701150894165e-001 + <_> + + 0 -1 85 2.1027203649282455e-002 + + -1.7627145349979401e-001 1.5575961768627167e-001 + <_> + + 0 -1 64 -1.0052478313446045e-001 + + 4.3756628036499023e-001 -7.1234770119190216e-002 + <_> + + 0 -1 492 1.6048339009284973e-001 + + -4.4313762336969376e-002 5.8793294429779053e-001 + <_> + + 0 -1 325 -1.3694979250431061e-002 + + 4.6393951773643494e-001 -4.9393087625503540e-002 + <_> + + 0 -1 801 -1.0348608717322350e-002 + + -5.6864339113235474e-001 4.3830793350934982e-002 + <_> + + 0 -1 113 4.3693828047253191e-004 + + -1.2412225455045700e-001 1.7724548280239105e-001 + <_> + + 0 -1 772 2.0174574106931686e-002 + + 3.4722398966550827e-002 -6.4911144971847534e-001 + <_> + + 0 -1 608 2.7627442032098770e-003 + + 3.3415716141462326e-002 -5.6784248352050781e-001 + <_> + + 0 -1 548 9.4243183732032776e-002 + + 3.6081630736589432e-002 -5.4598355293273926e-001 + <_> + + 0 -1 657 1.3022410869598389e-001 + + 2.3609474301338196e-002 -8.1051534414291382e-001 + <_> + + 0 -1 150 3.4168981015682220e-002 + + 3.0419193208217621e-002 -5.4837167263031006e-001 + <_> + + 0 -1 508 -1.3829786330461502e-002 + + 2.8903034329414368e-001 -8.3337344229221344e-002 + <_> + + 0 -1 567 -1.1257855594158173e-001 + + 2.4400597810745239e-001 -9.4966351985931396e-002 + <_> + + 0 -1 522 2.2601764649152756e-002 + + -1.2031050771474838e-001 2.2070118784904480e-001 + <_> + + 0 -1 103 6.0193203389644623e-003 + + -1.1150372028350830e-001 2.2088561952114105e-001 + + <_> + 100 + -1.9608964920043945e+000 + + <_> + + 0 -1 635 -5.5475331842899323e-002 + + 
5.7381832599639893e-001 -6.1133551597595215e-001 + <_> + + 0 -1 114 1.5077188611030579e-002 + + -3.2403641939163208e-001 4.4795718789100647e-001 + <_> + + 0 -1 382 -1.7332084476947784e-001 + + 5.7016098499298096e-001 -1.4949052035808563e-001 + <_> + + 0 -1 527 -2.2349517792463303e-002 + + 3.2987663149833679e-001 -1.9739057123661041e-001 + <_> + + 0 -1 229 -4.3568551540374756e-002 + + 4.7400921583175659e-001 -1.1525857448577881e-001 + <_> + + 0 -1 543 -8.5387237370014191e-002 + + 4.8310154676437378e-001 -1.0712914913892746e-001 + <_> + + 0 -1 484 -1.0893560945987701e-002 + + 4.2913889884948730e-001 -1.2771929800510406e-001 + <_> + + 0 -1 347 2.2162383422255516e-002 + + -1.0264735668897629e-001 4.1223859786987305e-001 + <_> + + 0 -1 129 1.1509815230965614e-002 + + 7.9151436686515808e-002 -5.1365512609481812e-001 + <_> + + 0 -1 112 -1.3226773589849472e-002 + + -4.6660965681076050e-001 9.4403184950351715e-002 + <_> + + 0 -1 92 1.3398203253746033e-001 + + 6.0357581824064255e-002 -7.2359943389892578e-001 + <_> + + 0 -1 771 1.6636839136481285e-003 + + -1.8457394838333130e-001 1.8665075302124023e-001 + <_> + + 0 -1 118 5.4986506700515747e-002 + + 5.3810637444257736e-002 -6.9745784997940063e-001 + <_> + + 0 -1 767 1.0141556151211262e-002 + + -1.1535926163196564e-001 2.9906859993934631e-001 + <_> + + 0 -1 719 1.0847958922386169e-001 + + -7.9667761921882629e-002 4.5295405387878418e-001 + <_> + + 0 -1 268 6.9254308938980103e-002 + + -1.2226542830467224e-001 2.9665270447731018e-001 + <_> + + 0 -1 84 -4.2100343853235245e-002 + + 3.2429689168930054e-001 -9.7494564950466156e-002 + <_> + + 0 -1 698 4.3827138841152191e-002 + + -7.0396944880485535e-002 4.2152154445648193e-001 + <_> + + 0 -1 678 -7.9618133604526520e-003 + + -5.3270429372787476e-001 6.0727626085281372e-002 + <_> + + 0 -1 367 -3.6274239420890808e-002 + + 3.9936524629592896e-001 -8.2018867135047913e-002 + <_> + + 0 -1 132 -3.6207525990903378e-003 + + -4.9971449375152588e-001 6.5834544599056244e-002 + <_> + + 0 -1 78 
-5.8745052665472031e-003 + + 2.0759461820125580e-001 -1.3994742929935455e-001 + <_> + + 0 -1 335 5.6833028793334961e-003 + + -1.0299365222454071e-001 3.1138652563095093e-001 + <_> + + 0 -1 562 5.4967451840639114e-002 + + -1.3316051661968231e-001 1.9581510126590729e-001 + <_> + + 0 -1 50 -2.3877078294754028e-001 + + 3.5186833143234253e-001 -8.9515976607799530e-002 + <_> + + 0 -1 104 -1.1658706702291965e-002 + + 2.5490432977676392e-001 -1.1555980890989304e-001 + <_> + + 0 -1 278 5.0145937129855156e-003 + + 6.6223099827766418e-002 -4.1758745908737183e-001 + <_> + + 0 -1 714 -1.2021902948617935e-002 + + -5.4200112819671631e-001 4.3091051280498505e-002 + <_> + + 0 -1 481 1.3384851813316345e-001 + + 2.5700764730572701e-002 -7.6608151197433472e-001 + <_> + + 0 -1 556 1.5479752421379089e-001 + + -5.3280718624591827e-002 5.1743608713150024e-001 + <_> + + 0 -1 695 1.5386009588837624e-002 + + -4.2808044701814651e-002 5.0297588109970093e-001 + <_> + + 0 -1 44 1.7224973440170288e-001 + + -1.2964868545532227e-001 1.9733795523643494e-001 + <_> + + 0 -1 319 5.3552012890577316e-002 + + 4.8952136188745499e-002 -5.9248495101928711e-001 + <_> + + 0 -1 761 1.7618156969547272e-002 + + -9.2624112963676453e-002 2.5510758161544800e-001 + <_> + + 0 -1 589 -3.5309664905071259e-002 + + 4.2031341791152954e-001 -5.1969490945339203e-002 + <_> + + 0 -1 675 9.2930328100919724e-003 + + -1.2086719274520874e-001 2.1424143016338348e-001 + <_> + + 0 -1 262 -3.8868649862706661e-003 + + -6.2090682983398438e-001 3.1773131340742111e-002 + <_> + + 0 -1 36 2.5914441794157028e-002 + + -5.4667808115482330e-002 4.1980910301208496e-001 + <_> + + 0 -1 171 2.6020430028438568e-002 + + -1.0541697591543198e-001 2.0044851303100586e-001 + <_> + + 0 -1 321 -6.3407374545931816e-003 + + 2.2295591235160828e-001 -1.1792995780706406e-001 + <_> + + 0 -1 334 -5.1677217707037926e-003 + + -5.3816491365432739e-001 3.8462761789560318e-002 + <_> + + 0 -1 346 -8.4124654531478882e-003 + + -7.1969830989837646e-001 
2.3862140253186226e-002 + <_> + + 0 -1 800 -8.8197160512208939e-003 + + -5.3116542100906372e-001 3.2283063977956772e-002 + <_> + + 0 -1 795 2.3384200409054756e-002 + + 2.7469146996736526e-002 -6.3458430767059326e-001 + <_> + + 0 -1 607 3.3260846976190805e-003 + + -1.0226461291313171e-001 2.2026740014553070e-001 + <_> + + 0 -1 710 2.2873836569488049e-003 + + 3.8538243621587753e-002 -5.3379940986633301e-001 + <_> + + 0 -1 716 -1.7032278701663017e-002 + + 2.5450223684310913e-001 -8.0304980278015137e-002 + <_> + + 0 -1 483 3.7073395214974880e-003 + + -1.0596913844347000e-001 2.5345388054847717e-001 + <_> + + 0 -1 478 7.0802189409732819e-002 + + -7.3215052485466003e-002 2.5112062692642212e-001 + <_> + + 0 -1 627 -4.5040566474199295e-003 + + 2.9091736674308777e-001 -7.3829248547554016e-002 + <_> + + 0 -1 650 -4.1749500669538975e-003 + + 1.8382187187671661e-001 -1.1864095926284790e-001 + <_> + + 0 -1 538 1.9676316529512405e-002 + + -5.3947038948535919e-002 3.4872695803642273e-001 + <_> + + 0 -1 598 2.4505499750375748e-002 + + -1.1110459268093109e-001 1.7675013840198517e-001 + <_> + + 0 -1 642 5.0232456997036934e-003 + + -7.7635556459426880e-002 2.7674710750579834e-001 + <_> + + 0 -1 124 7.2092656046152115e-003 + + 3.8163613528013229e-002 -5.3656154870986938e-001 + <_> + + 0 -1 2 3.8705547922290862e-004 + + -1.4624150097370148e-001 1.3452073931694031e-001 + <_> + + 0 -1 696 -2.5338146835565567e-002 + + 3.6304703354835510e-001 -5.5815316736698151e-002 + <_> + + 0 -1 147 2.9723107814788818e-002 + + 2.4663370102643967e-002 -7.8608381748199463e-001 + <_> + + 0 -1 181 1.2690615840256214e-002 + + 3.1972713768482208e-002 -4.9735805392265320e-001 + <_> + + 0 -1 246 -9.0589851140975952e-002 + + 2.6692071557044983e-001 -7.6755240559577942e-002 + <_> + + 0 -1 601 3.4163692034780979e-003 + + -9.4232670962810516e-002 2.0130588114261627e-001 + <_> + + 0 -1 814 2.2128049749881029e-003 + + 2.9020575806498528e-002 -6.8562233448028564e-001 + <_> + + 0 -1 740 -1.0931958677247167e-003 + + 
1.8590304255485535e-001 -1.0203883796930313e-001 + <_> + + 0 -1 798 -5.1551647484302521e-003 + + -6.7470979690551758e-001 2.6759594678878784e-002 + <_> + + 0 -1 185 2.4196054786443710e-002 + + 2.2405004128813744e-002 -6.2768095731735229e-001 + <_> + + 0 -1 54 3.2341023907065392e-003 + + 1.3789996504783630e-001 -1.2909232079982758e-001 + <_> + + 0 -1 116 2.9914700984954834e-001 + + -7.2955578565597534e-002 2.7455681562423706e-001 + <_> + + 0 -1 364 -9.9727194756269455e-003 + + 3.0149310827255249e-001 -5.7043511420488358e-002 + <_> + + 0 -1 162 -6.6150026395916939e-003 + + -4.7744289040565491e-001 3.6662790924310684e-002 + <_> + + 0 -1 753 1.7986021935939789e-001 + + 2.6935050264000893e-002 -5.6041115522384644e-001 + <_> + + 0 -1 120 4.9073372036218643e-002 + + -9.1753169894218445e-002 1.8830762803554535e-001 + <_> + + 0 -1 520 -4.4546205550432205e-002 + + 2.8058284521102905e-001 -6.8988710641860962e-002 + <_> + + 0 -1 672 2.2576067596673965e-002 + + -5.4128490388393402e-002 3.5953769087791443e-001 + <_> + + 0 -1 307 5.6266561150550842e-002 + + -2.9556462541222572e-002 5.1843011379241943e-001 + <_> + + 0 -1 89 1.5538917854428291e-002 + + 2.7421416714787483e-002 -6.1930805444717407e-001 + <_> + + 0 -1 504 8.1526311114430428e-003 + + 9.1529168188571930e-002 -1.6721811890602112e-001 + <_> + + 0 -1 231 -2.6176521554589272e-002 + + -2.1571595966815948e-001 7.4524872004985809e-002 + <_> + + 0 -1 295 2.5498190894722939e-002 + + -3.6771539598703384e-002 4.6460142731666565e-001 + <_> + + 0 -1 617 -2.7546184137463570e-003 + + 2.3868623375892639e-001 -7.3346205055713654e-002 + <_> + + 0 -1 154 1.1484200134873390e-002 + + 8.8087327778339386e-002 -2.2285257279872894e-001 + <_> + + 0 -1 764 -6.5946984104812145e-003 + + 1.9035552442073822e-001 -9.4829380512237549e-002 + <_> + + 0 -1 748 -1.3570852577686310e-002 + + -5.7044094800949097e-001 2.8593784198164940e-002 + <_> + + 0 -1 341 2.1627168171107769e-003 + + 2.4528857320547104e-002 -5.5539435148239136e-001 + <_> + + 0 -1 294 
7.6453232765197754e-001 + + 1.6712073236703873e-002 -7.9999411106109619e-001 + <_> + + 0 -1 499 3.2710012048482895e-002 + + -1.2251428514719009e-001 1.4426843822002411e-001 + <_> + + 0 -1 550 4.0088586509227753e-002 + + -8.4161520004272461e-002 2.1347804367542267e-001 + <_> + + 0 -1 613 4.5649609528481960e-003 + + 3.3291704952716827e-002 -4.9624651670455933e-001 + <_> + + 0 -1 731 1.7573118209838867e-002 + + -5.2213698625564575e-002 3.2624527812004089e-001 + <_> + + 0 -1 108 4.4035743921995163e-003 + + 2.8457932174205780e-002 -5.6235110759735107e-001 + <_> + + 0 -1 451 -3.6385841667652130e-003 + + 1.3256885111331940e-001 -1.0901164263486862e-001 + <_> + + 0 -1 509 2.1540902554988861e-002 + + -9.4820916652679443e-002 1.9871900975704193e-001 + <_> + + 0 -1 421 -5.7242000475525856e-003 + + -5.5162918567657471e-001 2.8565581887960434e-002 + <_> + + 0 -1 754 5.4941125214099884e-002 + + 1.6136603429913521e-002 -7.5592172145843506e-001 + <_> + + 0 -1 33 -1.8812941014766693e-001 + + -6.7492049932479858e-001 1.6340442001819611e-002 + <_> + + 0 -1 549 -2.2323146462440491e-002 + + 3.0878978967666626e-001 -5.0334885716438293e-002 + <_> + + 0 -1 122 8.1148128956556320e-003 + + -6.9861218333244324e-002 2.1136663854122162e-001 + <_> + + 0 -1 776 4.0647638961672783e-003 + + 2.4346185848116875e-002 -7.0286405086517334e-001 + <_> + + 0 -1 557 -1.0087935253977776e-002 + + 2.1732743084430695e-001 -7.2459824383258820e-002 + <_> + + 0 -1 497 -3.1481392681598663e-002 + + -1.6200110316276550e-001 1.1238843202590942e-001 + <_> + + 0 -1 91 4.8734731972217560e-002 + + -1.5694068372249603e-001 1.3734646141529083e-001 + + <_> + 100 + -2.0093009471893311e+000 + + <_> + + 0 -1 735 4.4366665184497833e-002 + + -5.8140194416046143e-001 7.2436654567718506e-001 + <_> + + 0 -1 472 2.1525867283344269e-002 + + -5.1378548145294189e-001 2.6829591393470764e-001 + <_> + + 0 -1 175 3.2553628087043762e-002 + + -1.8482643365859985e-001 4.0847155451774597e-001 + <_> + + 0 -1 571 -1.5257634222507477e-002 + + 
2.7095904946327209e-001 -2.2653931379318237e-001 + <_> + + 0 -1 733 -5.1631681621074677e-002 + + 7.9776346683502197e-001 -5.5487394332885742e-002 + <_> + + 0 -1 811 4.8636645078659058e-003 + + 1.0041853785514832e-001 -5.7607883214950562e-001 + <_> + + 0 -1 541 -2.4604482576251030e-002 + + 4.0577822923660278e-001 -1.1823268234729767e-001 + <_> + + 0 -1 751 -4.0018320083618164e-002 + + 4.9601888656616211e-001 -1.1562841385602951e-001 + <_> + + 0 -1 311 -3.8432124257087708e-001 + + 5.1655977964401245e-001 -7.7797263860702515e-002 + <_> + + 0 -1 770 1.0087579488754272e-002 + + 8.3183489739894867e-002 -4.2510354518890381e-001 + <_> + + 0 -1 56 3.5646241158246994e-002 + + 4.7171127051115036e-002 -6.9391661882400513e-001 + <_> + + 0 -1 461 -9.2189535498619080e-003 + + -5.7927066087722778e-001 5.5925726890563965e-002 + <_> + + 0 -1 780 9.9300779402256012e-003 + + 3.7854433059692383e-002 -8.3010107278823853e-001 + <_> + + 0 -1 531 -4.5023232698440552e-002 + + 6.5314036607742310e-001 -5.4263141006231308e-002 + <_> + + 0 -1 661 2.7809415478259325e-003 + + -7.9617887735366821e-002 3.6411368846893311e-001 + <_> + + 0 -1 602 5.7087037712335587e-003 + + 4.2994618415832520e-002 -7.1181446313858032e-001 + <_> + + 0 -1 189 -1.8998349085450172e-002 + + 2.8312543034553528e-001 -1.1130740493535995e-001 + <_> + + 0 -1 183 -2.9453031718730927e-002 + + 4.3500581383705139e-001 -7.9582005739212036e-002 + <_> + + 0 -1 517 -3.5714324563741684e-002 + + -5.2660512924194336e-001 6.3691914081573486e-002 + <_> + + 0 -1 533 -9.0164467692375183e-002 + + -5.0678330659866333e-001 5.4722979664802551e-002 + <_> + + 0 -1 691 3.2198697328567505e-002 + + -1.5255528688430786e-001 2.1604290604591370e-001 + <_> + + 0 -1 155 -7.3475898243486881e-003 + + -4.5090731978416443e-001 5.4995406419038773e-002 + <_> + + 0 -1 94 9.5312051475048065e-002 + + 3.3918380737304688e-002 -6.5872561931610107e-001 + <_> + + 0 -1 164 6.2868902459740639e-003 + + 3.7710737437009811e-002 -6.0753965377807617e-001 + <_> + + 0 -1 121 
-1.1290907859802246e-001 + + -7.0270425081253052e-001 3.0603762716054916e-002 + <_> + + 0 -1 424 -4.6465590596199036e-002 + + -3.3231839537620544e-001 8.5260793566703796e-002 + <_> + + 0 -1 496 -1.0819414258003235e-001 + + 4.8122748732566833e-001 -4.8520937561988831e-002 + <_> + + 0 -1 80 8.2417801022529602e-002 + + 3.6295656114816666e-002 -7.0041549205780029e-001 + <_> + + 0 -1 658 4.3378092348575592e-002 + + -5.1333811134099960e-002 4.9604579806327820e-001 + <_> + + 0 -1 81 1.3811363279819489e-001 + + 3.4755382686853409e-002 -7.0348078012466431e-001 + <_> + + 0 -1 784 -5.6846244260668755e-003 + + -4.8265266418457031e-001 4.3064594268798828e-002 + <_> + + 0 -1 539 -2.4593371152877808e-001 + + -7.0451593399047852e-001 2.6368476450443268e-002 + <_> + + 0 -1 789 5.3406208753585815e-003 + + 2.6500016450881958e-002 -6.3891279697418213e-001 + <_> + + 0 -1 747 4.7923019155859947e-003 + + -1.2080077081918716e-001 1.6787913441658020e-001 + <_> + + 0 -1 20 1.0585110634565353e-002 + + 3.5772342234849930e-002 -5.8977240324020386e-001 + <_> + + 0 -1 761 -2.5300860404968262e-002 + + 4.5570418238639832e-001 -4.8687241971492767e-002 + <_> + + 0 -1 509 -5.5532712489366531e-002 + + 2.9917666316032410e-001 -7.4741333723068237e-002 + <_> + + 0 -1 18 -3.2699136063456535e-003 + + 1.8934503197669983e-001 -1.1226788163185120e-001 + <_> + + 0 -1 265 1.7293278127908707e-002 + + -6.9134898483753204e-002 3.4179314970970154e-001 + <_> + + 0 -1 29 2.6402756571769714e-002 + + -7.0478275418281555e-002 3.1093484163284302e-001 + <_> + + 0 -1 224 2.8450414538383484e-002 + + -1.1974166333675385e-001 1.9457480311393738e-001 + <_> + + 0 -1 53 1.2562072835862637e-002 + + -8.3494104444980621e-002 2.7475124597549438e-001 + <_> + + 0 -1 115 5.5329114198684692e-002 + + 3.2291658222675323e-002 -6.5260422229766846e-001 + <_> + + 0 -1 16 -4.0723673999309540e-002 + + -5.5715459585189819e-001 3.3979110419750214e-002 + <_> + + 0 -1 279 -1.9029550254344940e-002 + + 3.0959266424179077e-001 -7.2507932782173157e-002 
+ <_> + + 0 -1 387 3.3815369009971619e-002 + + -1.1691040545701981e-001 1.8897840380668640e-001 + <_> + + 0 -1 453 2.2427774965763092e-002 + + -6.4937353134155273e-002 3.7149554491043091e-001 + <_> + + 0 -1 257 1.5668259933590889e-002 + + 5.2510060369968414e-002 -3.9585873484611511e-001 + <_> + + 0 -1 267 9.3314182013273239e-003 + + 4.0375780314207077e-002 -4.3617913126945496e-001 + <_> + + 0 -1 380 9.2742014676332474e-003 + + 9.6884138882160187e-002 -1.9626072049140930e-001 + <_> + + 0 -1 197 -1.5842571854591370e-002 + + -3.6402279138565063e-001 5.5363278836011887e-002 + <_> + + 0 -1 35 -2.2496590390801430e-003 + + 1.5552155673503876e-001 -1.1114847660064697e-001 + <_> + + 0 -1 470 -1.9214218854904175e-001 + + 2.4561734497547150e-001 -7.3354333639144897e-002 + <_> + + 0 -1 554 1.2042051181197166e-002 + + -8.5798375308513641e-002 2.5862255692481995e-001 + <_> + + 0 -1 787 5.0818910822272301e-003 + + -1.1264710128307343e-001 1.8084830045700073e-001 + <_> + + 0 -1 420 7.6445139944553375e-002 + + -1.0340440273284912e-001 1.8615351617336273e-001 + <_> + + 0 -1 711 -1.4945456758141518e-002 + + -7.3217189311981201e-001 2.5240363553166389e-002 + <_> + + 0 -1 417 1.1469788104295731e-001 + + 3.5731770098209381e-002 -4.3763482570648193e-001 + <_> + + 0 -1 102 -4.1331350803375244e-002 + + -6.7725455760955811e-001 2.0876549184322357e-002 + <_> + + 0 -1 759 -6.3697457313537598e-002 + + -5.0194233655929565e-001 2.8676675632596016e-002 + <_> + + 0 -1 454 -3.2679505646228790e-002 + + 5.5279546976089478e-001 -3.3861406147480011e-002 + <_> + + 0 -1 397 -3.0185556970536709e-003 + + 2.4731954932212830e-001 -7.7184990048408508e-002 + <_> + + 0 -1 177 -2.0480744540691376e-002 + + 2.7591237425804138e-001 -6.6020831465721130e-002 + <_> + + 0 -1 317 3.3862262964248657e-002 + + -6.8927489221096039e-002 2.5522756576538086e-001 + <_> + + 0 -1 360 -8.8570471853017807e-003 + + -6.0567396879196167e-001 2.8111936524510384e-002 + <_> + + 0 -1 390 -3.0993777327239513e-003 + + 
1.3937081396579742e-001 -1.1177450418472290e-001 + <_> + + 0 -1 605 -1.3053707778453827e-002 + + -7.4933940172195435e-001 2.1189738065004349e-002 + <_> + + 0 -1 258 5.9927878901362419e-003 + + 2.2239523008465767e-002 -5.8804184198379517e-001 + <_> + + 0 -1 123 3.0308642890304327e-003 + + 2.4679224938154221e-002 -5.4468244314193726e-001 + <_> + + 0 -1 779 8.5215084254741669e-003 + + -6.8102687597274780e-002 2.4524471163749695e-001 + <_> + + 0 -1 534 -1.0024971328675747e-002 + + 2.5492498278617859e-001 -7.5317256152629852e-002 + <_> + + 0 -1 50 2.8498408198356628e-001 + + -1.4328868687152863e-001 1.5635497868061066e-001 + <_> + + 0 -1 66 -1.2144061736762524e-002 + + 2.7671325206756592e-001 -8.8052719831466675e-002 + <_> + + 0 -1 654 1.1470983736217022e-002 + + -6.9386981427669525e-002 2.4788482487201691e-001 + <_> + + 0 -1 327 -1.4382693171501160e-001 + + 3.0280208587646484e-001 -6.0519739985466003e-002 + <_> + + 0 -1 254 5.1055825315415859e-003 + + -9.2187143862247467e-002 1.9080857932567596e-001 + <_> + + 0 -1 332 -7.8170709311962128e-003 + + 2.3564057052135468e-001 -6.6252477467060089e-002 + <_> + + 0 -1 633 5.0649088807404041e-003 + + -6.5458901226520538e-002 2.5203970074653625e-001 + <_> + + 0 -1 632 -2.5227782316505909e-003 + + 2.4411727488040924e-001 -9.7643561661243439e-002 + <_> + + 0 -1 159 6.8352641537785530e-003 + + 2.0034790039062500e-002 -7.7160757780075073e-001 + <_> + + 0 -1 48 2.4185009533539414e-004 + + -9.6628785133361816e-002 1.6354230046272278e-001 + <_> + + 0 -1 579 -3.0329784378409386e-002 + + -5.6379920244216919e-001 2.6720838621258736e-002 + <_> + + 0 -1 139 4.9704704433679581e-003 + + 2.8160627931356430e-002 -4.7068077325820923e-001 + <_> + + 0 -1 487 -3.7729792296886444e-002 + + 2.1477498114109039e-001 -7.4596777558326721e-002 + <_> + + 0 -1 350 1.9093245267868042e-002 + + -8.7626710534095764e-002 1.7652207612991333e-001 + <_> + + 0 -1 143 4.4954958558082581e-001 + + 7.0079401135444641e-002 -2.9131263494491577e-001 + <_> + + 0 -1 670 
7.3619596660137177e-003 + + -8.4028005599975586e-002 2.1018427610397339e-001 + <_> + + 0 -1 459 -1.6222636913880706e-003 + + 2.0056292414665222e-001 -9.6767894923686981e-002 + <_> + + 0 -1 572 4.5172423124313354e-002 + + -1.0514897853136063e-001 1.6722859442234039e-001 + <_> + + 0 -1 734 -1.6353441402316093e-002 + + 2.4717527627944946e-001 -7.3606699705123901e-002 + <_> + + 0 -1 575 2.6556827127933502e-002 + + -8.1566587090492249e-002 2.0803849399089813e-001 + <_> + + 0 -1 498 -4.7615066170692444e-002 + + 2.2300598025321960e-001 -7.7875822782516479e-002 + <_> + + 0 -1 574 2.3835271596908569e-002 + + 2.3376975208520889e-002 -6.6152364015579224e-001 + <_> + + 0 -1 226 1.3608166575431824e-001 + + 1.2909125536680222e-002 -8.3415579795837402e-001 + <_> + + 0 -1 559 -6.2542133964598179e-003 + + -3.6948135495185852e-001 3.5489104688167572e-002 + <_> + + 0 -1 277 -2.1813618019223213e-002 + + 4.6014159917831421e-001 -3.2023489475250244e-002 + <_> + + 0 -1 768 -6.0652187094092369e-003 + + -6.2025654315948486e-001 2.5077600032091141e-002 + <_> + + 0 -1 817 -2.4680690839886665e-003 + + 1.3315553963184357e-001 -1.0686720907688141e-001 + <_> + + 0 -1 819 -1.0383233428001404e-002 + + -3.9024323225021362e-001 3.6657121032476425e-002 + <_> + + 0 -1 430 -1.4558293856680393e-002 + + 3.0595546960830688e-001 -4.6124905347824097e-002 + + <_> + 100 + -1.9893888235092163e+000 + + <_> + + 0 -1 637 -3.6353822797536850e-002 + + 5.7490348815917969e-001 -6.0402154922485352e-001 + <_> + + 0 -1 209 3.3910516649484634e-002 + + -4.7268575429916382e-001 2.9836708307266235e-001 + <_> + + 0 -1 568 -3.1085191294550896e-002 + + 2.9725867509841919e-001 -2.5986751914024353e-001 + <_> + + 0 -1 40 -3.3315129578113556e-002 + + 5.8938282728195190e-001 -1.0283301770687103e-001 + <_> + + 0 -1 176 2.0765572786331177e-002 + + -1.1350679397583008e-001 4.3589678406715393e-001 + <_> + + 0 -1 188 -2.2746603935956955e-002 + + 5.4989874362945557e-001 -1.0095789283514023e-001 + <_> + + 0 -1 189 -1.3580635190010071e-002 
+ + 3.2243436574935913e-001 -1.4773033559322357e-001 + <_> + + 0 -1 412 -7.5849056243896484e-002 + + 6.1856883764266968e-001 -5.8312434703111649e-002 + <_> + + 0 -1 300 1.1116056144237518e-001 + + -6.4182795584201813e-002 8.3322328329086304e-001 + <_> + + 0 -1 794 -6.2784920446574688e-003 + + -5.6858378648757935e-001 6.3030011951923370e-002 + <_> + + 0 -1 486 1.5065364539623260e-001 + + -5.0163157284259796e-002 5.1983052492141724e-001 + <_> + + 0 -1 438 -1.3567157089710236e-001 + + 4.4820526242256165e-001 -7.0899941027164459e-002 + <_> + + 0 -1 689 1.8747521564364433e-002 + + -6.7941129207611084e-002 5.3145283460617065e-001 + <_> + + 0 -1 68 5.4262429475784302e-002 + + -1.0494362562894821e-001 2.9918253421783447e-001 + <_> + + 0 -1 576 -1.1378649622201920e-001 + + 4.7880446910858154e-001 -7.1374006569385529e-002 + <_> + + 0 -1 684 -2.1417509764432907e-002 + + 3.3607813715934753e-001 -9.9478617310523987e-002 + <_> + + 0 -1 354 1.1753311753273010e-001 + + -3.9848849177360535e-002 6.0107874870300293e-001 + <_> + + 0 -1 260 -7.7472939155995846e-003 + + -6.9318675994873047e-001 4.5742511749267578e-002 + <_> + + 0 -1 558 -2.9795799404382706e-002 + + 4.2947125434875488e-001 -7.1105487644672394e-002 + <_> + + 0 -1 151 -1.8132720142602921e-002 + + -2.5635355710983276e-001 1.0910239070653915e-001 + <_> + + 0 -1 76 8.1834137439727783e-002 + + 1.1094906181097031e-001 -2.0392002165317535e-001 + <_> + + 0 -1 111 1.1528218165040016e-002 + + 5.3910095244646072e-002 -4.5931258797645569e-001 + <_> + + 0 -1 514 2.8621554374694824e-002 + + -1.4410360157489777e-001 1.6356547176837921e-001 + <_> + + 0 -1 618 1.7922203987836838e-002 + + -4.8119675368070602e-002 4.8948758840560913e-001 + <_> + + 0 -1 773 2.0611037034541368e-003 + + -1.4414174854755402e-001 1.7143261432647705e-001 + <_> + + 0 -1 310 -1.6526800394058228e-001 + + 3.0243551731109619e-001 -7.7524982392787933e-002 + <_> + + 0 -1 477 -6.2977060675621033e-002 + + -5.2628058195114136e-001 4.2716968804597855e-002 + <_> + + 0 -1 485 
-3.6092847585678101e-001 + + 2.8676527738571167e-001 -7.3810704052448273e-002 + <_> + + 0 -1 739 -4.9367453902959824e-003 + + 3.0023676156997681e-001 -6.8351514637470245e-002 + <_> + + 0 -1 140 -6.7799808457493782e-003 + + -4.4271990656852722e-001 4.8298392444849014e-002 + <_> + + 0 -1 471 1.3567670248448849e-002 + + -8.0314539372920990e-002 2.7058762311935425e-001 + <_> + + 0 -1 205 8.7900757789611816e-003 + + 5.6896943598985672e-002 -3.9116287231445313e-001 + <_> + + 0 -1 671 1.7109028995037079e-002 + + -8.4821760654449463e-002 2.4214385449886322e-001 + <_> + + 0 -1 252 1.0548608750104904e-001 + + 2.7761165052652359e-002 -7.8180325031280518e-001 + <_> + + 0 -1 359 -1.2998120859265327e-002 + + 2.5417125225067139e-001 -7.5519964098930359e-002 + <_> + + 0 -1 375 -1.9761435687541962e-002 + + 2.2261811792850494e-001 -9.8095886409282684e-002 + <_> + + 0 -1 292 -3.2749888487160206e-003 + + 3.3720037341117859e-001 -7.0348791778087616e-002 + <_> + + 0 -1 414 5.2116528153419495e-002 + + -1.0506465286016464e-001 1.8207317590713501e-001 + <_> + + 0 -1 234 1.1137709021568298e-001 + + 3.5336300730705261e-002 -6.6138511896133423e-001 + <_> + + 0 -1 117 -4.1943341493606567e-003 + + -3.3382201194763184e-001 5.0874624401330948e-002 + <_> + + 0 -1 291 1.6809567809104919e-001 + + 2.4236176162958145e-002 -6.8939536809921265e-001 + <_> + + 0 -1 338 -3.3826553262770176e-003 + + -6.5884113311767578e-001 2.2103810682892799e-002 + <_> + + 0 -1 288 8.5055597126483917e-002 + + 2.1747881546616554e-002 -6.3618826866149902e-001 + <_> + + 0 -1 45 -5.7586412876844406e-003 + + 2.1060591936111450e-001 -9.1170653700828552e-002 + <_> + + 0 -1 77 -3.6941014230251312e-002 + + -6.9123482704162598e-001 2.8818784281611443e-002 + <_> + + 0 -1 766 1.1866136454045773e-002 + + -8.6081407964229584e-002 2.1623598039150238e-001 + <_> + + 0 -1 247 -1.8231317400932312e-002 + + 3.1362554430961609e-001 -6.1600871384143829e-002 + <_> + + 0 -1 324 -7.3889703489840031e-003 + + 3.0826017260551453e-001 
-5.8532096445560455e-002 + <_> + + 0 -1 732 5.1691643893718719e-003 + + 2.9993735253810883e-002 -6.2977868318557739e-001 + <_> + + 0 -1 667 -1.2862924486398697e-002 + + -4.3736469745635986e-001 3.2921057194471359e-002 + <_> + + 0 -1 703 -2.2983271628618240e-002 + + 2.6511108875274658e-001 -6.6909156739711761e-002 + <_> + + 0 -1 674 3.9090830832719803e-003 + + -1.0127128660678864e-001 2.0073010027408600e-001 + <_> + + 0 -1 744 -9.0994630008935928e-003 + + 1.3385884463787079e-001 -1.3561423122882843e-001 + <_> + + 0 -1 796 -5.6763380765914917e-002 + + 5.6630617380142212e-001 -3.2809354364871979e-002 + <_> + + 0 -1 259 4.8070675693452358e-003 + + 4.4194158166646957e-002 -4.0204837918281555e-001 + <_> + + 0 -1 340 -1.0450733825564384e-002 + + 2.3827658593654633e-001 -7.6380252838134766e-002 + <_> + + 0 -1 256 -1.7678029835224152e-002 + + 2.1672835946083069e-001 -7.9985588788986206e-002 + <_> + + 0 -1 67 2.7773942798376083e-002 + + -4.5684009790420532e-002 3.9579004049301147e-001 + <_> + + 0 -1 474 8.7843492627143860e-002 + + 3.4366942942142487e-002 -5.3730368614196777e-001 + <_> + + 0 -1 166 1.9426442682743073e-002 + + -9.9758401513099670e-002 2.0479789376258850e-001 + <_> + + 0 -1 443 -1.6408386826515198e-001 + + 3.7857857346534729e-001 -5.3978055715560913e-002 + <_> + + 0 -1 361 -5.6890182197093964e-002 + + 3.2869496941566467e-001 -4.6734869480133057e-002 + <_> + + 0 -1 73 -5.0815895199775696e-002 + + -4.7677189111709595e-001 3.4135986119508743e-002 + <_> + + 0 -1 285 -3.7362597882747650e-002 + + 4.8422592878341675e-001 -4.0414333343505859e-002 + <_> + + 0 -1 144 -2.7112174779176712e-002 + + -6.0966557264328003e-001 2.7988156303763390e-002 + <_> + + 0 -1 49 -4.5164238661527634e-002 + + -8.6822801828384399e-001 1.2851182371377945e-002 + <_> + + 0 -1 680 5.1760412752628326e-003 + + 2.9443990439176559e-002 -4.3073326349258423e-001 + <_> + + 0 -1 478 -8.4172144532203674e-002 + + 2.9436424374580383e-001 -4.9916967749595642e-002 + <_> + + 0 -1 458 1.9042786210775375e-002 + 
+ -3.4412886947393417e-002 4.4832718372344971e-001 + <_> + + 0 -1 478 5.7432703673839569e-002 + + -7.7486172318458557e-002 1.8293291330337524e-001 + <_> + + 0 -1 86 1.7984922975301743e-002 + + -9.2546351253986359e-002 1.8607094883918762e-001 + <_> + + 0 -1 572 4.1388571262359619e-002 + + -1.0863362997770309e-001 1.5282210707664490e-001 + <_> + + 0 -1 43 9.1915345191955566e-001 + + 2.8560444712638855e-002 -6.0594308376312256e-001 + <_> + + 0 -1 532 -1.9133273512125015e-002 + + -8.5332053899765015e-001 1.2371141463518143e-002 + <_> + + 0 -1 809 9.7082778811454773e-003 + + 1.2650677002966404e-002 -8.1339740753173828e-001 + <_> + + 0 -1 676 -8.9433819055557251e-002 + + -5.3538358211517334e-001 2.1225364878773689e-002 + <_> + + 0 -1 416 -6.7972466349601746e-002 + + 2.2748632729053497e-001 -6.3480392098426819e-002 + <_> + + 0 -1 660 -3.7654261104762554e-003 + + 2.2040548920631409e-001 -6.8912781774997711e-002 + <_> + + 0 -1 425 2.3539884388446808e-001 + + 1.8962537869811058e-002 -7.3649096488952637e-001 + <_> + + 0 -1 682 8.4126703441143036e-003 + + 1.6248503699898720e-002 -6.6107767820358276e-001 + <_> + + 0 -1 0 5.9240809641778469e-003 + + 1.2709932401776314e-002 -7.9893755912780762e-001 + <_> + + 0 -1 30 5.7852733880281448e-003 + + 1.3032922521233559e-002 -7.1413958072662354e-001 + <_> + + 0 -1 326 -6.7011862993240356e-002 + + -9.6618568897247314e-001 1.0199193842709064e-002 + <_> + + 0 -1 802 6.2955496832728386e-003 + + 1.1003237217664719e-002 -7.9627370834350586e-001 + <_> + + 0 -1 308 2.2338733077049255e-001 + + 1.8742179498076439e-002 -5.6901288032531738e-001 + <_> + + 0 -1 244 6.9172801449894905e-003 + + 1.4789959415793419e-002 -7.2332131862640381e-001 + <_> + + 0 -1 555 -6.9728232920169830e-002 + + -7.3287606239318848e-001 1.2337258085608482e-002 + <_> + + 0 -1 491 1.0706198960542679e-001 + + 1.7545098438858986e-002 -5.9982377290725708e-001 + <_> + + 0 -1 186 1.6913741827011108e-002 + + 2.2844947874546051e-002 -4.8921287059783936e-001 + <_> + + 0 -1 460 
-8.7020844221115112e-003 + + -7.5956517457962036e-001 1.3844862580299377e-002 + <_> + + 0 -1 296 7.9813569784164429e-002 + + 1.8443970009684563e-002 -6.0584050416946411e-001 + <_> + + 0 -1 263 2.1565157920122147e-002 + + -4.1093282401561737e-002 3.2994875311851501e-001 + <_> + + 0 -1 507 -1.1819498240947723e-001 + + 7.6440620422363281e-001 -1.5310727991163731e-002 + <_> + + 0 -1 13 -1.1644796468317509e-002 + + 1.5059669315814972e-001 -8.5400067269802094e-002 + <_> + + 0 -1 136 4.8412360250949860e-002 + + 1.5801191329956055e-002 -8.1906789541244507e-001 + <_> + + 0 -1 447 -9.1522522270679474e-003 + + 1.2237263470888138e-001 -1.0453698039054871e-001 + <_> + + 0 -1 282 1.0599581897258759e-001 + + -3.7372149527072906e-002 3.8735589385032654e-001 + <_> + + 0 -1 769 3.6802445538341999e-003 + + -7.8164376318454742e-002 1.5969018638134003e-001 + <_> + + 0 -1 51 4.8344388604164124e-002 + + 2.6674685999751091e-002 -4.7453081607818604e-001 + <_> + + 0 -1 535 -1.2036672234535217e-001 + + 2.0954853296279907e-001 -6.7930959165096283e-002 + + <_> + 100 + -2.0882713794708252e+000 + + <_> + + 0 -1 588 1.1892063915729523e-001 + + -6.1742293834686279e-001 4.5307034254074097e-001 + <_> + + 0 -1 713 -2.0387375727295876e-002 + + 2.7144163846969604e-001 -4.6928820013999939e-001 + <_> + + 0 -1 529 1.2590212747454643e-002 + + -2.0108436048030853e-001 3.8105344772338867e-001 + <_> + + 0 -1 163 2.0354434847831726e-002 + + -1.6400215029716492e-001 2.7574071288108826e-001 + <_> + + 0 -1 96 -2.7795214205980301e-002 + + 3.7839883565902710e-001 -1.1305580288171768e-001 + <_> + + 0 -1 348 1.4193280041217804e-001 + + -6.0474541038274765e-002 6.2733256816864014e-001 + <_> + + 0 -1 206 5.4513983428478241e-002 + + -1.0904555022716522e-001 2.6557052135467529e-001 + <_> + + 0 -1 587 -9.3165330588817596e-002 + + 5.9793907403945923e-001 -2.6734948158264160e-002 + <_> + + 0 -1 551 -1.1328049004077911e-001 + + 5.4262262582778931e-001 -6.9828927516937256e-002 + <_> + + 0 -1 488 -5.2978955209255219e-002 + + 
3.7499949336051941e-001 -8.6347490549087524e-002 + <_> + + 0 -1 240 1.6652107238769531e-002 + + -1.1925343424081802e-001 2.8592520952224731e-001 + <_> + + 0 -1 398 -1.2966150417923927e-002 + + 2.5650292634963989e-001 -1.0981626063585281e-001 + <_> + + 0 -1 24 -3.0883198976516724e-001 + + 5.6538671255111694e-001 -4.8163380473852158e-002 + <_> + + 0 -1 542 4.0199004113674164e-002 + + -8.2143485546112061e-002 2.9318833351135254e-001 + <_> + + 0 -1 431 -4.2654491961002350e-002 + + 3.3214175701141357e-001 -7.2890780866146088e-002 + <_> + + 0 -1 569 -8.7233863770961761e-002 + + -6.9970768690109253e-001 3.0839594081044197e-002 + <_> + + 0 -1 646 -1.9263501744717360e-003 + + -3.0089509487152100e-001 6.1933990567922592e-002 + <_> + + 0 -1 127 -1.2266384437680244e-002 + + -4.4856789708137512e-001 5.1748808473348618e-002 + <_> + + 0 -1 725 2.5618553161621094e-002 + + -5.5925786495208740e-002 3.6526843905448914e-001 + <_> + + 0 -1 436 7.2159399278461933e-003 + + -7.6918721199035645e-002 2.5979781150817871e-001 + <_> + + 0 -1 12 1.8935009837150574e-002 + + -1.6115440428256989e-001 1.1890874803066254e-001 + <_> + + 0 -1 760 8.4126051515340805e-003 + + 1.0076313465833664e-001 -1.8971538543701172e-001 + <_> + + 0 -1 449 -1.0874198749661446e-002 + + -2.2683894634246826e-001 8.8565327227115631e-002 + <_> + + 0 -1 546 -9.8901830613613129e-002 + + 4.1373634338378906e-001 -5.5995136499404907e-002 + <_> + + 0 -1 320 1.3641971349716187e-001 + + 2.8648722916841507e-002 -6.5269929170608521e-001 + <_> + + 0 -1 391 -4.7755786217749119e-003 + + 1.3905292749404907e-001 -1.3267673552036285e-001 + <_> + + 0 -1 204 -3.6639358848333359e-002 + + 3.0246520042419434e-001 -6.5013952553272247e-002 + <_> + + 0 -1 349 4.6268589794635773e-002 + + -9.9801562726497650e-002 2.2881330549716949e-001 + <_> + + 0 -1 20 1.1634247377514839e-002 + + 2.9752535745501518e-002 -6.4520883560180664e-001 + <_> + + 0 -1 7 8.7490290403366089e-002 + + 3.2797992229461670e-002 -4.8356053233146667e-001 + <_> + + 0 -1 213 
-5.4423720575869083e-003 + + -4.2312434315681458e-001 4.0884345769882202e-002 + <_> + + 0 -1 101 1.5459659509360790e-002 + + -8.1535100936889648e-002 2.2442305088043213e-001 + <_> + + 0 -1 643 -1.4734671451151371e-002 + + 1.7989698052406311e-001 -1.2625794112682343e-001 + <_> + + 0 -1 357 2.0615854859352112e-001 + + 3.9496298879384995e-002 -4.1161277890205383e-001 + <_> + + 0 -1 287 -1.4983332157135010e-001 + + -6.1346882581710815e-001 2.8787860646843910e-002 + <_> + + 0 -1 316 4.1250735521316528e-003 + + -5.9485599398612976e-002 3.2962897419929504e-001 + <_> + + 0 -1 301 3.0726704746484756e-002 + + -1.2685997784137726e-001 1.2820862233638763e-001 + <_> + + 0 -1 511 -3.5758357495069504e-002 + + 3.0264523625373840e-001 -6.0329668223857880e-002 + <_> + + 0 -1 493 3.2861482352018356e-002 + + -6.9495856761932373e-002 2.6029106974601746e-001 + <_> + + 0 -1 207 2.1980709861963987e-003 + + -9.6626468002796173e-002 1.6587738692760468e-001 + <_> + + 0 -1 469 2.4153670296072960e-002 + + -8.6987555027008057e-002 2.0683506131172180e-001 + <_> + + 0 -1 370 -1.5082080848515034e-002 + + 3.3427235484123230e-001 -4.6475376933813095e-002 + <_> + + 0 -1 697 -4.0052030235528946e-003 + + 1.6450133919715881e-001 -1.0323253273963928e-001 + <_> + + 0 -1 573 -2.7856476604938507e-002 + + -5.2609956264495850e-001 2.7226824313402176e-002 + <_> + + 0 -1 765 -2.0476717501878738e-002 + + 3.5776519775390625e-001 -4.5102179050445557e-002 + <_> + + 0 -1 767 6.6039813682436943e-003 + + -8.2313135266304016e-002 1.8757410347461700e-001 + <_> + + 0 -1 180 1.9800897687673569e-002 + + -6.2550969421863556e-002 2.5798192620277405e-001 + <_> + + 0 -1 452 -6.6496878862380981e-003 + + 1.7883610725402832e-001 -1.0978580266237259e-001 + <_> + + 0 -1 198 2.0809392444789410e-003 + + 6.4188353717327118e-002 -2.5249513983726501e-001 + <_> + + 0 -1 196 -1.8214372918009758e-002 + + -5.1439154148101807e-001 3.1133541837334633e-002 + <_> + + 0 -1 506 -2.4209341406822205e-001 + + 2.2937053442001343e-001 
-7.4214383959770203e-002 + <_> + + 0 -1 415 -2.1343454718589783e-002 + + 1.9603218138217926e-001 -8.9379556477069855e-002 + <_> + + 0 -1 169 2.0123381167650223e-002 + + -4.8318199813365936e-002 3.0825397372245789e-001 + <_> + + 0 -1 604 5.6990878656506538e-003 + + 3.8014207035303116e-002 -4.2261347174644470e-001 + <_> + + 0 -1 141 8.5393609479069710e-003 + + 2.7884945273399353e-002 -4.6396487951278687e-001 + <_> + + 0 -1 692 -3.0534571036696434e-002 + + 3.8758745789527893e-001 -4.0188502520322800e-002 + <_> + + 0 -1 681 4.4068414717912674e-003 + + 3.0290151014924049e-002 -4.6877279877662659e-001 + <_> + + 0 -1 709 1.4089973410591483e-003 + + -8.9484535157680511e-002 1.6149339079856873e-001 + <_> + + 0 -1 778 -2.8641782701015472e-003 + + -3.4671550989151001e-001 4.0905058383941650e-002 + <_> + + 0 -1 105 -2.1180357784032822e-002 + + 2.6447567343711853e-001 -5.6043967604637146e-002 + <_> + + 0 -1 585 -8.3004087209701538e-002 + + 6.8283176422119141e-001 -2.1583627909421921e-002 + <_> + + 0 -1 299 -3.0913855880498886e-002 + + 2.7285331487655640e-001 -4.8811897635459900e-002 + <_> + + 0 -1 42 -3.0797538347542286e-003 + + 1.6655747592449188e-001 -1.0710606724023819e-001 + <_> + + 0 -1 194 1.1443568393588066e-002 + + 5.7899076491594315e-002 -2.7548545598983765e-001 + <_> + + 0 -1 686 -1.1415679007768631e-002 + + 2.0206256210803986e-001 -6.9681018590927124e-002 + <_> + + 0 -1 359 1.8940184265375137e-002 + + -6.7144595086574554e-002 2.3748835921287537e-001 + <_> + + 0 -1 822 2.2566128522157669e-002 + + -6.4958825707435608e-002 2.3299516737461090e-001 + <_> + + 0 -1 622 -7.9396709799766541e-002 + + 5.0319761037826538e-001 -2.9171440750360489e-002 + <_> + + 0 -1 47 4.2561900615692139e-001 + + -1.1196137964725494e-001 1.4227935671806335e-001 + <_> + + 0 -1 427 4.4270299375057220e-002 + + -2.9513187706470490e-002 5.2951472997665405e-001 + <_> + + 0 -1 74 1.7174175009131432e-002 + + -4.0488652884960175e-002 3.8944888114929199e-001 + <_> + + 0 -1 820 -1.7377794720232487e-003 + + 
-6.9514840841293335e-001 2.1108377724885941e-002 + <_> + + 0 -1 309 -2.2039374709129333e-001 + + -7.7504026889801025e-001 1.4507055282592773e-002 + <_> + + 0 -1 721 7.2579132393002510e-004 + + -1.2043432891368866e-001 1.2195762246847153e-001 + <_> + + 0 -1 803 4.6488004736602306e-003 + + 5.2685238420963287e-002 -2.5253716111183167e-001 + <_> + + 0 -1 211 -1.1456177569925785e-002 + + 2.0801055431365967e-001 -6.8232141435146332e-002 + <_> + + 0 -1 173 1.9705438986420631e-002 + + -9.8248891532421112e-002 1.5582084655761719e-001 + <_> + + 0 -1 378 -1.8179285526275635e-001 + + -4.8372307419776917e-001 2.8137721121311188e-002 + <_> + + 0 -1 235 6.6751101985573769e-003 + + 7.0301778614521027e-002 -1.9548210501670837e-001 + <_> + + 0 -1 756 8.6226612329483032e-003 + + -6.6171310842037201e-002 2.2881644964218140e-001 + <_> + + 0 -1 306 -1.2309605255723000e-002 + + -1.6088645160198212e-001 7.8796282410621643e-002 + <_> + + 0 -1 383 -1.4574723318219185e-002 + + 4.7432678937911987e-001 -3.2390803098678589e-002 + <_> + + 0 -1 178 6.3157223165035248e-002 + + 2.0668696612119675e-002 -6.6265499591827393e-001 + <_> + + 0 -1 15 -1.3862394727766514e-002 + + -5.8219170570373535e-001 1.6982229426503181e-002 + <_> + + 0 -1 612 1.3356646895408630e-001 + + 1.6412599012255669e-002 -6.2750118970870972e-001 + <_> + + 0 -1 659 -4.4732712209224701e-002 + + 3.2250836491584778e-001 -4.3704714626073837e-002 + <_> + + 0 -1 374 -6.0449633747339249e-003 + + -4.8467743396759033e-001 2.6045855134725571e-002 + <_> + + 0 -1 755 4.8574481159448624e-002 + + 1.7532445490360260e-002 -6.0437834262847900e-001 + <_> + + 0 -1 323 -1.6868990659713745e-001 + + 2.5659248232841492e-001 -4.6175695955753326e-002 + <_> + + 0 -1 595 -2.6491273194551468e-002 + + 1.9132797420024872e-001 -7.6470151543617249e-002 + <_> + + 0 -1 727 1.6412120312452316e-002 + + -8.9943401515483856e-002 1.5353266894817352e-001 + <_> + + 0 -1 638 -1.2895902991294861e-001 + + -5.4557460546493530e-001 2.2016519680619240e-002 + <_> + + 0 -1 644 
-6.6651748493313789e-003 + + -5.3426635265350342e-001 1.9773421809077263e-002 + <_> + + 0 -1 362 1.9235624931752682e-003 + + -1.2044542282819748e-001 9.8064243793487549e-002 + <_> + + 0 -1 9 8.6902552843093872e-001 + + 1.8667848780751228e-002 -6.3924860954284668e-001 + <_> + + 0 -1 3 1.6741793602705002e-002 + + 2.2372758015990257e-002 -5.0248688459396362e-001 + <_> + + 0 -1 625 -2.5590710341930389e-002 + + 2.8624305129051208e-001 -4.1666794568300247e-002 + <_> + + 0 -1 664 -1.5453767031431198e-002 + + -5.7585281133651733e-001 2.0682098343968391e-002 + <_> + + 0 -1 805 -1.9419237505644560e-003 + + -8.1982332468032837e-001 1.0764360427856445e-002 + <_> + + 0 -1 566 3.2704328186810017e-003 + + -4.8110716044902802e-002 2.5348010659217834e-001 + + <_> + 100 + -2.0704312324523926e+000 + + <_> + + 0 -1 450 -1.0077046602964401e-001 + + 4.5175901055335999e-001 -6.1467176675796509e-001 + <_> + + 0 -1 215 6.5452642738819122e-003 + + -3.5731741786003113e-001 3.3717277646064758e-001 + <_> + + 0 -1 238 1.9674018025398254e-002 + + -2.4631671607494354e-001 2.7475911378860474e-001 + <_> + + 0 -1 540 7.0211179554462433e-002 + + -8.7460011243820190e-002 6.2532955408096313e-001 + <_> + + 0 -1 489 1.4418694376945496e-001 + + -7.2135701775550842e-002 5.7573407888412476e-001 + <_> + + 0 -1 518 -1.2962925434112549e-001 + + 7.5993639230728149e-001 -3.5770721733570099e-002 + <_> + + 0 -1 83 6.3010111451148987e-002 + + -1.3292062282562256e-001 2.8952628374099731e-001 + <_> + + 0 -1 408 4.6381396241486073e-003 + + 8.4557555615901947e-002 -3.6389535665512085e-001 + <_> + + 0 -1 515 -5.9499843046069145e-003 + + 2.1222642064094543e-001 -1.3255165517330170e-001 + <_> + + 0 -1 441 -1.2947644293308258e-001 + + 7.1918481588363647e-001 -3.8779754191637039e-002 + <_> + + 0 -1 429 -4.1396625339984894e-002 + + 4.1296491026878357e-001 -2.4519389495253563e-002 + <_> + + 0 -1 266 -3.8469914346933365e-002 + + 3.4920585155487061e-001 -8.9438758790493011e-002 + <_> + + 0 -1 640 9.1865241527557373e-002 + + 
-9.4548538327217102e-002 3.2806205749511719e-001 + <_> + + 0 -1 283 1.1909760534763336e-001 + + -5.7471185922622681e-002 4.8986643552780151e-001 + <_> + + 0 -1 645 -5.0334664992988110e-003 + + -3.0589446425437927e-001 9.0836197137832642e-002 + <_> + + 0 -1 688 1.1820561485365033e-003 + + -1.2910071015357971e-001 1.6918015480041504e-001 + <_> + + 0 -1 791 1.4982650056481361e-002 + + 4.2109638452529907e-002 -5.4996389150619507e-001 + <_> + + 0 -1 786 -6.9685000926256180e-003 + + -4.1965305805206299e-001 5.1458306610584259e-002 + <_> + + 0 -1 771 1.1164390016347170e-003 + + -1.5750421583652496e-001 1.4960579574108124e-001 + <_> + + 0 -1 32 -1.8970571458339691e-002 + + 4.1179320216178894e-001 -4.6856414526700974e-002 + <_> + + 0 -1 581 1.4439454302191734e-002 + + -8.0535419285297394e-002 2.7214762568473816e-001 + <_> + + 0 -1 510 -9.5121651887893677e-002 + + 3.6779737472534180e-001 -5.7101279497146606e-002 + <_> + + 0 -1 419 -8.9814327657222748e-003 + + 1.8265065550804138e-001 -1.4160120487213135e-001 + <_> + + 0 -1 537 -2.2633418440818787e-002 + + 3.6102777719497681e-001 -5.1901526749134064e-002 + <_> + + 0 -1 593 -2.2710688412189484e-002 + + 1.1053603142499924e-001 -1.7578089237213135e-001 + <_> + + 0 -1 184 1.5642758458852768e-002 + + -7.9318910837173462e-002 3.1653311848640442e-001 + <_> + + 0 -1 606 -3.3173859119415283e-003 + + -3.3540964126586914e-001 5.5748481303453445e-002 + <_> + + 0 -1 106 5.8215018361806870e-003 + + 5.7110719382762909e-002 -2.8086242079734802e-001 + <_> + + 0 -1 1 -7.6883025467395782e-003 + + 1.1708685010671616e-001 -1.4618489146232605e-001 + <_> + + 0 -1 405 1.0860338807106018e-002 + + -7.2233915328979492e-002 2.5490903854370117e-001 + <_> + + 0 -1 135 -2.2606331855058670e-002 + + -6.1050999164581299e-001 2.7791287750005722e-002 + <_> + + 0 -1 232 3.9641276001930237e-002 + + 1.4674142934381962e-002 -8.0721253156661987e-001 + <_> + + 0 -1 523 -7.5482949614524841e-003 + + 3.2877162098884583e-001 -5.2556037902832031e-002 + <_> + + 0 -1 503 
1.8739406019449234e-002 + + -1.3022327423095703e-001 1.4352528750896454e-001 + <_> + + 0 -1 157 5.5137358605861664e-002 + + 3.3469397574663162e-002 -5.5188298225402832e-001 + <_> + + 0 -1 241 2.7504984289407730e-002 + + -5.3431924432516098e-002 3.2144358754158020e-001 + <_> + + 0 -1 724 1.2589400634169579e-002 + + 8.4676288068294525e-002 -2.3093448579311371e-001 + <_> + + 0 -1 208 -1.0715539753437042e-001 + + 3.4335365891456604e-001 -4.8208847641944885e-002 + <_> + + 0 -1 274 -1.6146148741245270e-001 + + -4.9193075299263000e-001 3.3508080989122391e-002 + <_> + + 0 -1 816 -2.3257080465555191e-003 + + 1.2611246109008789e-001 -1.2348042428493500e-001 + <_> + + 0 -1 255 1.0999076068401337e-002 + + -7.3867186903953552e-002 2.2680267691612244e-001 + <_> + + 0 -1 148 3.0133351683616638e-003 + + 2.8249852359294891e-002 -6.2205773591995239e-001 + <_> + + 0 -1 404 -7.3815938085317612e-003 + + 1.4106234908103943e-001 -1.0592179000377655e-001 + <_> + + 0 -1 641 -9.9609285593032837e-002 + + -5.0706058740615845e-001 2.9250498861074448e-002 + <_> + + 0 -1 401 -1.6993083059787750e-002 + + -8.0253732204437256e-001 1.4262860640883446e-002 + <_> + + 0 -1 276 -3.4000636078417301e-003 + + -6.3281798362731934e-001 1.8125489354133606e-002 + <_> + + 0 -1 440 -1.1020098812878132e-002 + + -5.1488196849822998e-001 2.3795366287231445e-002 + <_> + + 0 -1 298 -1.3264201581478119e-002 + + -1.7508319020271301e-001 8.1054925918579102e-002 + <_> + + 0 -1 437 1.8977813422679901e-002 + + -7.0323936641216278e-002 2.6073047518730164e-001 + <_> + + 0 -1 375 -1.3009980320930481e-002 + + 1.5854099392890930e-001 -1.0393854975700378e-001 + <_> + + 0 -1 482 9.2145306989550591e-003 + + -9.3604646623134613e-002 2.3740513622760773e-001 + <_> + + 0 -1 702 3.0485898256301880e-002 + + 2.5209117680788040e-002 -6.1866730451583862e-001 + <_> + + 0 -1 65 -1.6016994416713715e-001 + + -7.2219741344451904e-001 1.5122707001864910e-002 + <_> + + 0 -1 614 4.5530922710895538e-002 + + -6.3044443726539612e-002 
2.4358576536178589e-001 + <_> + + 0 -1 743 1.0203102603554726e-002 + + -6.1121750622987747e-002 2.7015075087547302e-001 + <_> + + 0 -1 525 2.4663940072059631e-002 + + -4.2811531573534012e-002 3.3221286535263062e-001 + <_> + + 0 -1 750 -9.1385319828987122e-003 + + 1.1204078793525696e-001 -1.2999817728996277e-001 + <_> + + 0 -1 227 1.9347546622157097e-002 + + 1.6758413985371590e-002 -7.7254980802536011e-001 + <_> + + 0 -1 748 -9.6733132377266884e-003 + + -4.1074886918067932e-001 3.1290818005800247e-002 + <_> + + 0 -1 712 -1.8869692459702492e-002 + + -5.1710635423660278e-001 2.5810025632381439e-002 + <_> + + 0 -1 578 -3.2522417604923248e-002 + + -6.0281151533126831e-001 1.8763946369290352e-002 + <_> + + 0 -1 322 2.7749570086598396e-002 + + 2.4084037169814110e-002 -4.8589682579040527e-001 + <_> + + 0 -1 195 -9.6951154991984367e-003 + + -4.5413139462471008e-001 2.6410164311528206e-002 + <_> + + 0 -1 797 5.4729478433728218e-003 + + 1.4941636472940445e-002 -7.1346837282180786e-001 + <_> + + 0 -1 352 -7.0067740976810455e-002 + + 4.2590034008026123e-001 -3.4890078008174896e-002 + <_> + + 0 -1 31 1.1978195980191231e-002 + + 2.7581827715039253e-002 -5.2088767290115356e-001 + <_> + + 0 -1 677 2.4962129071354866e-002 + + 1.3511617667973042e-002 -7.7897942066192627e-001 + <_> + + 0 -1 230 1.5438200533390045e-001 + + -6.4114958047866821e-002 2.0813925564289093e-001 + <_> + + 0 -1 61 9.9275723099708557e-002 + + 2.7019618079066277e-002 -5.1581478118896484e-001 + <_> + + 0 -1 377 -1.6580173373222351e-001 + + -4.4263607263565063e-001 2.9533190652728081e-002 + <_> + + 0 -1 521 -7.3407413437962532e-003 + + 1.6750444471836090e-001 -9.2496410012245178e-002 + <_> + + 0 -1 701 -6.5014618448913097e-003 + + 2.4347235262393951e-001 -6.4470589160919189e-002 + <_> + + 0 -1 580 1.0067591443657875e-002 + + -4.6819631010293961e-002 3.1546166539192200e-001 + <_> + + 0 -1 131 6.4292205497622490e-003 + + -1.0853879153728485e-001 1.3230116665363312e-001 + <_> + + 0 -1 275 3.1485879421234131e-001 + + 
-4.5282851904630661e-002 3.3699107170104980e-001 + <_> + + 0 -1 14 2.5760228745639324e-003 + + 2.8863398358225822e-002 -5.5243116617202759e-001 + <_> + + 0 -1 598 -2.4543633684515953e-002 + + 2.5651532411575317e-001 -6.0512304306030273e-002 + <_> + + 0 -1 775 2.5632018223404884e-003 + + 2.5235766544938087e-002 -5.5885905027389526e-001 + <_> + + 0 -1 758 -7.8137237578630447e-003 + + 2.5905117392539978e-001 -5.0864595919847488e-002 + <_> + + 0 -1 384 -2.1051995456218719e-002 + + 4.2795905470848083e-001 -3.3333282917737961e-002 + <_> + + 0 -1 59 5.1057189702987671e-003 + + 4.5829366892576218e-002 -2.9227322340011597e-001 + <_> + + 0 -1 27 2.7099503204226494e-003 + + -1.1747492104768753e-001 1.1283197999000549e-001 + <_> + + 0 -1 214 -1.5764944255352020e-002 + + 2.3869976401329041e-001 -6.7887537181377411e-002 + <_> + + 0 -1 762 -3.9155382663011551e-002 + + 3.5364216566085815e-001 -3.9627619087696075e-002 + <_> + + 0 -1 93 -4.5880142599344254e-002 + + 2.3842345178127289e-001 -7.3718093335628510e-002 + <_> + + 0 -1 318 -1.0302878916263580e-002 + + 2.1327751874923706e-001 -6.4293526113033295e-002 + <_> + + 0 -1 655 2.9027346521615982e-002 + + -4.4896885752677917e-002 3.2940328121185303e-001 + <_> + + 0 -1 87 6.6355936229228973e-002 + + 2.0458346232771873e-002 -6.6512107849121094e-001 + <_> + + 0 -1 57 -2.8334883973002434e-004 + + 1.5212120115756989e-001 -8.4248125553131104e-002 + <_> + + 0 -1 729 3.6828286945819855e-002 + + 1.7162136733531952e-002 -7.2021645307540894e-001 + <_> + + 0 -1 519 -5.8778584003448486e-002 + + 5.6561857461929321e-001 -2.3866711184382439e-002 + <_> + + 0 -1 652 -2.8530266135931015e-003 + + -6.4782738685607910e-001 1.9957689568400383e-002 + <_> + + 0 -1 763 -3.8836512714624405e-002 + + 3.2825550436973572e-001 -3.7595361471176147e-002 + <_> + + 0 -1 457 2.9163623694330454e-003 + + -7.3809541761875153e-002 1.7669951915740967e-001 + <_> + + 0 -1 95 1.0645738802850246e-002 + + -7.0638619363307953e-002 2.3382186889648438e-001 + <_> + + 0 -1 182 
7.0208713412284851e-002 + + 1.4219421893358231e-002 -8.6676079034805298e-001 + <_> + + 0 -1 328 4.7699254937469959e-003 + + -5.8143001049757004e-002 2.2547726333141327e-001 + <_> + + 0 -1 174 7.7869743108749390e-002 + + 2.1615644916892052e-002 -5.5831909179687500e-001 + <_> + + 0 -1 456 6.1486944556236267e-002 + + 1.1775434948503971e-002 -8.4519267082214355e-001 + <_> + + 0 -1 596 -3.7456271238625050e-003 + + 1.5362323820590973e-001 -7.8043371438980103e-002 + + <_> + 100 + -2.0724730491638184e+000 + + <_> + + 0 -1 592 -6.0319140553474426e-002 + + 5.2276933193206787e-001 -5.9983855485916138e-001 + <_> + + 0 -1 216 2.2086698561906815e-002 + + -4.0275236964225769e-001 2.6235228776931763e-001 + <_> + + 0 -1 418 6.9381311535835266e-002 + + -1.6929075121879578e-001 4.8264318704605103e-001 + <_> + + 0 -1 237 3.6362867802381516e-002 + + -2.3040720820426941e-001 2.1662452816963196e-001 + <_> + + 0 -1 253 -2.0021330565214157e-002 + + 4.1769924759864807e-001 -7.2497829794883728e-002 + <_> + + 0 -1 516 1.8830269575119019e-002 + + -1.0871052742004395e-001 2.7507993578910828e-001 + <_> + + 0 -1 88 -2.7963159605860710e-002 + + 3.8442537188529968e-001 -9.8193608224391937e-002 + <_> + + 0 -1 179 -2.0735040307044983e-002 + + 2.7963668107986450e-001 -1.0946217179298401e-001 + <_> + + 0 -1 433 1.8611408770084381e-002 + + -8.4531605243682861e-002 3.4154561161994934e-001 + <_> + + 0 -1 462 6.1192102730274200e-002 + + -8.4275096654891968e-002 3.8789725303649902e-001 + <_> + + 0 -1 792 -6.3834507018327713e-003 + + -3.6611992120742798e-001 7.2199709713459015e-002 + <_> + + 0 -1 615 -1.9713025540113449e-002 + + 5.5944812297821045e-001 -5.0970125943422318e-002 + <_> + + 0 -1 591 1.6262326389551163e-002 + + -1.1030866950750351e-001 1.8134446442127228e-001 + <_> + + 0 -1 738 -7.9304602695629001e-004 + + 2.1516664326190948e-001 -9.0573474764823914e-002 + <_> + + 0 -1 271 -1.0854007303714752e-001 + + -5.0619691610336304e-001 4.6518307179212570e-002 + <_> + + 0 -1 142 1.2656173110008240e-001 + + 
-7.0183344185352325e-002 2.9395601153373718e-001 + <_> + + 0 -1 4 1.0736478492617607e-002 + + -6.7497111856937408e-002 3.2903778553009033e-001 + <_> + + 0 -1 547 1.3156885281205177e-002 + + -4.8924397677183151e-002 3.8120672106742859e-001 + <_> + + 0 -1 673 4.1970603168010712e-002 + + -7.8111015260219574e-002 2.7430343627929688e-001 + <_> + + 0 -1 563 -2.9167115688323975e-002 + + -3.0556207895278931e-001 6.0548584908246994e-002 + <_> + + 0 -1 628 9.5878299325704575e-003 + + -5.9223473072052002e-002 4.0729284286499023e-001 + <_> + + 0 -1 621 4.6066567301750183e-002 + + -1.0950764268636703e-001 2.0254631340503693e-001 + <_> + + 0 -1 444 8.3330616354942322e-002 + + -5.3298909217119217e-002 3.5257405042648315e-001 + <_> + + 0 -1 464 1.6818877309560776e-002 + + -1.8468046188354492e-001 1.0324559360742569e-001 + <_> + + 0 -1 218 8.7530359625816345e-002 + + 3.3843815326690674e-002 -5.7269579172134399e-001 + <_> + + 0 -1 629 -4.3712105602025986e-002 + + -6.5228754281997681e-001 2.6080906391143799e-002 + <_> + + 0 -1 501 -6.9797024130821228e-002 + + 3.5989531874656677e-001 -5.5579621344804764e-002 + <_> + + 0 -1 584 1.2184922397136688e-001 + + 2.4342115968465805e-002 -8.0760699510574341e-001 + <_> + + 0 -1 505 4.6440614387392998e-003 + + -9.6218079328536987e-002 1.9613739848136902e-001 + <_> + + 0 -1 302 6.5494313836097717e-002 + + 2.0375160500407219e-002 -8.4897041320800781e-001 + <_> + + 0 -1 631 -2.7141533792018890e-002 + + -5.7403481006622314e-001 2.1777858957648277e-002 + <_> + + 0 -1 611 7.8971475362777710e-002 + + -4.5071244239807129e-002 4.1026180982589722e-001 + <_> + + 0 -1 553 -1.9546686671674252e-003 + + 1.7176805436611176e-001 -9.1940574347972870e-002 + <_> + + 0 -1 781 8.5139460861682892e-004 + + -1.3312540948390961e-001 1.3347741961479187e-001 + <_> + + 0 -1 623 -6.6646421328186989e-003 + + -5.8490514755249023e-001 2.7229143306612968e-002 + <_> + + 0 -1 339 -4.4413385912775993e-003 + + -4.1740539669990540e-001 3.3226296305656433e-002 + <_> + + 0 -1 287 
1.8269449472427368e-001 + + 2.4889033287763596e-002 -5.3175938129425049e-001 + <_> + + 0 -1 502 -4.3986845761537552e-002 + + 3.8222178816795349e-001 -4.2776387184858322e-002 + <_> + + 0 -1 807 -2.6941983029246330e-002 + + 2.9621607065200806e-001 -5.3630694746971130e-002 + <_> + + 0 -1 28 2.6566657423973083e-001 + + -7.0285581052303314e-002 2.5832760334014893e-001 + <_> + + 0 -1 414 -4.1604138910770416e-002 + + 2.2306843101978302e-001 -8.1017620861530304e-002 + <_> + + 0 -1 544 -3.6077257245779037e-002 + + -2.9468396306037903e-001 5.4006773978471756e-002 + <_> + + 0 -1 411 -3.9362259209156036e-002 + + 5.6163829565048218e-001 -2.9768040403723717e-002 + <_> + + 0 -1 815 -2.9800175689160824e-003 + + -3.6741560697555542e-001 4.4891070574522018e-002 + <_> + + 0 -1 463 2.4718637578189373e-003 + + -1.1504692584276199e-001 1.4088194072246552e-001 + <_> + + 0 -1 39 -1.1537356302142143e-002 + + -5.5634349584579468e-001 2.6983646675944328e-002 + <_> + + 0 -1 372 -1.2723604217171669e-002 + + 2.7351182699203491e-001 -5.8644331991672516e-002 + <_> + + 0 -1 71 2.4906045291572809e-004 + + -8.4738910198211670e-002 1.6140305995941162e-001 + <_> + + 0 -1 125 1.4373645186424255e-002 + + 2.8544176369905472e-002 -5.2416086196899414e-001 + <_> + + 0 -1 708 1.0028794407844543e-002 + + -6.1999477446079254e-002 2.3914791643619537e-001 + <_> + + 0 -1 708 -1.4209940098226070e-002 + + 1.9753696024417877e-001 -8.3127744495868683e-002 + <_> + + 0 -1 423 2.4372754991054535e-001 + + -6.7355602979660034e-002 2.4561752378940582e-001 + <_> + + 0 -1 536 -1.6633799672126770e-001 + + 2.5892275571823120e-001 -6.2999695539474487e-002 + <_> + + 0 -1 757 4.5368936844170094e-003 + + 2.5090510025620461e-002 -5.9024924039840698e-001 + <_> + + 0 -1 219 -3.0643681064248085e-002 + + 1.8818324804306030e-001 -8.2770593464374542e-002 + <_> + + 0 -1 442 4.5454028993844986e-002 + + -1.0276072472333908e-001 1.7047047615051270e-001 + <_> + + 0 -1 312 -6.5909177064895630e-002 + + 2.6654756069183350e-001 
-5.2585989236831665e-002 + <_> + + 0 -1 239 -1.4441770315170288e-001 + + -6.7441099882125854e-001 2.1799251437187195e-002 + <_> + + 0 -1 545 6.9004088640213013e-002 + + 1.7652159556746483e-002 -6.1918365955352783e-001 + <_> + + 0 -1 634 -1.4434859156608582e-002 + + 2.2330665588378906e-001 -6.3716575503349304e-002 + <_> + + 0 -1 161 -1.2439724802970886e-001 + + 3.1121006608009338e-001 -5.1833681762218475e-002 + <_> + + 0 -1 212 -1.1646759696304798e-002 + + 1.4673095941543579e-001 -1.0670039057731628e-001 + <_> + + 0 -1 249 -1.1230677366256714e-002 + + 2.7083748579025269e-001 -5.6705884635448456e-002 + <_> + + 0 -1 374 -5.6339222937822342e-003 + + -4.7205814719200134e-001 3.1372327357530594e-002 + <_> + + 0 -1 422 -2.3702147603034973e-001 + + -5.4121118783950806e-001 2.5358194485306740e-002 + <_> + + 0 -1 704 4.6283796429634094e-002 + + -2.3110834881663322e-002 6.7559683322906494e-001 + <_> + + 0 -1 564 -7.8479632735252380e-002 + + -5.0711256265640259e-001 2.8956977650523186e-002 + <_> + + 0 -1 749 -4.0851697325706482e-002 + + -4.6281021833419800e-001 2.6245500892400742e-002 + <_> + + 0 -1 373 2.3407647386193275e-002 + + -2.9626572504639626e-002 4.9260509014129639e-001 + <_> + + 0 -1 818 -8.5386848077178001e-003 + + -3.8172510266304016e-001 3.8954172283411026e-002 + <_> + + 0 -1 512 -3.6464400589466095e-002 + + 2.7732792496681213e-001 -5.0175756216049194e-002 + <_> + + 0 -1 153 6.4124604687094688e-003 + + 8.0348476767539978e-002 -1.6669270396232605e-001 + <_> + + 0 -1 306 -1.2259284034371376e-002 + + -1.7725226283073425e-001 7.8243546187877655e-002 + <_> + + 0 -1 663 -1.4410009607672691e-002 + + -5.3909307718276978e-001 2.4736655876040459e-002 + <_> + + 0 -1 413 8.1370621919631958e-002 + + 1.7058655619621277e-002 -6.6197627782821655e-001 + <_> + + 0 -1 70 2.0788952708244324e-001 + + 1.4909379184246063e-002 -6.9600278139114380e-001 + <_> + + 0 -1 228 1.2777261435985565e-002 + + 1.7526712268590927e-002 -6.2062412500381470e-001 + <_> + + 0 -1 648 
-4.7627098858356476e-002 + + -5.8799284696578979e-001 1.8566770479083061e-002 + <_> + + 0 -1 426 -4.6582617796957493e-003 + + -8.1543582677841187e-001 1.2595964595675468e-002 + <_> + + 0 -1 315 -1.2581307440996170e-002 + + -3.7885621190071106e-001 2.9692940413951874e-002 + <_> + + 0 -1 233 -5.3676500916481018e-002 + + 2.3870430886745453e-001 -6.4122043550014496e-002 + <_> + + 0 -1 203 3.8745878264307976e-003 + + 3.5704579204320908e-002 -3.6539191007614136e-001 + <_> + + 0 -1 23 -1.7611593008041382e-002 + + 3.9749976992607117e-001 -4.1170511394739151e-002 + <_> + + 0 -1 222 4.4456262141466141e-002 + + -4.1849795728921890e-002 3.0652716755867004e-001 + <_> + + 0 -1 432 -3.6562662571668625e-003 + + 1.3611657917499542e-001 -9.8506972193717957e-002 + <_> + + 0 -1 467 -8.4179647266864777e-002 + + 2.2154912352561951e-001 -5.5714718997478485e-002 + <_> + + 0 -1 130 -1.8433203920722008e-002 + + -5.7187604904174805e-001 2.2685579955577850e-002 + <_> + + 0 -1 385 -4.6902447938919067e-003 + + 3.0469116568565369e-001 -4.2904902249574661e-002 + <_> + + 0 -1 760 1.0806242004036903e-002 + + 7.0505954325199127e-002 -1.8206176161766052e-001 + <_> + + 0 -1 395 -1.0084950365126133e-002 + + 1.6342975199222565e-001 -9.2139340937137604e-002 + <_> + + 0 -1 513 -5.1698148250579834e-002 + + 2.1090564131736755e-001 -9.2717804014682770e-002 + <_> + + 0 -1 730 1.5504235401749611e-002 + + 8.0614410340785980e-002 -1.8229016661643982e-001 + <_> + + 0 -1 319 -6.5531536936759949e-002 + + -9.4066488742828369e-001 1.2599020265042782e-002 + <_> + + 0 -1 337 1.1431857943534851e-002 + + -6.7865155637264252e-002 1.9108988344669342e-001 + <_> + + 0 -1 193 -4.5544765889644623e-003 + + 1.9530622661113739e-001 -6.3755750656127930e-002 + <_> + + 0 -1 399 -1.9059362821280956e-003 + + 1.9076475501060486e-001 -6.8802095949649811e-002 + <_> + + 0 -1 107 2.4532977840863168e-004 + + -8.7943159043788910e-002 1.3628348708152771e-001 + <_> + + 0 -1 109 1.4765396714210510e-002 + + 2.1555462852120399e-002 
-5.9361493587493896e-001 + <_> + + 0 -1 777 4.4241929426789284e-003 + + 1.4493391849100590e-002 -6.9662350416183472e-001 + <_> + + 0 -1 700 -3.0718728899955750e-002 + + -6.5334641933441162e-001 1.5405872836709023e-002 + + <_> + 100 + -2.0037040710449219e+000 + + <_> + + 0 -1 90 4.7543779015541077e-002 + + -6.1525166034698486e-001 3.6809986829757690e-001 + <_> + + 0 -1 685 -7.3774546384811401e-002 + + 2.1485500037670135e-001 -4.6901205182075500e-001 + <_> + + 0 -1 412 1.4361584186553955e-001 + + -1.2247033417224884e-001 6.2496304512023926e-001 + <_> + + 0 -1 50 4.2762514948844910e-001 + + -1.9074614346027374e-001 1.9916842877864838e-001 + <_> + + 0 -1 251 -2.0962147042155266e-002 + + 3.7791857123374939e-001 -8.7501220405101776e-002 + <_> + + 0 -1 810 8.3826966583728790e-003 + + 1.9660057127475739e-001 -2.1895657479763031e-001 + <_> + + 0 -1 746 1.2495662271976471e-001 + + -9.2528872191905975e-002 5.6979125738143921e-001 + <_> + + 0 -1 171 3.2186970114707947e-002 + + -1.1871361732482910e-001 2.5297036767005920e-001 + <_> + + 0 -1 281 -2.2608190774917603e-001 + + -3.6882448196411133e-001 9.2433169484138489e-002 + <_> + + 0 -1 409 1.5436002984642982e-002 + + -7.2143688797950745e-002 3.4161823987960815e-001 + <_> + + 0 -1 728 3.8813985884189606e-002 + + -6.6726602613925934e-002 3.8695737719535828e-001 + <_> + + 0 -1 381 -1.7647115513682365e-002 + + 2.4067574739456177e-001 -1.3484911620616913e-001 + <_> + + 0 -1 428 2.0411893725395203e-002 + + -9.0382359921932220e-002 3.3653914928436279e-001 + <_> + + 0 -1 656 4.6434059739112854e-002 + + -4.1718214750289917e-002 5.5478698015213013e-001 + <_> + + 0 -1 552 -1.6289663314819336e-001 + + -6.8476778268814087e-001 3.9775040000677109e-002 + <_> + + 0 -1 465 2.2175738215446472e-001 + + 2.2538824006915092e-002 -6.5304112434387207e-001 + <_> + + 0 -1 714 8.5806567221879959e-003 + + 4.2480908334255219e-002 -4.4011196494102478e-001 + <_> + + 0 -1 577 -9.3137830495834351e-002 + + 4.0965080261230469e-001 -5.2290525287389755e-002 + <_> 
+ + 0 -1 376 2.3062177002429962e-002 + + -1.1257856339216232e-001 1.9754567742347717e-001 + <_> + + 0 -1 561 -6.0689959675073624e-002 + + 5.8629971742630005e-001 -3.0010640621185303e-002 + <_> + + 0 -1 723 3.2815791666507721e-002 + + -3.9535962045192719e-002 5.0288110971450806e-001 + <_> + + 0 -1 669 -1.7895067110657692e-002 + + -4.3293157219886780e-001 4.8371005803346634e-002 + <_> + + 0 -1 331 6.0416408814489841e-003 + + -1.1830459535121918e-001 1.5037854015827179e-001 + <_> + + 0 -1 191 7.2951717302203178e-003 + + -8.7231896817684174e-002 2.2312921285629272e-001 + <_> + + 0 -1 821 -1.2723362306132913e-003 + + 1.5851731598377228e-001 -1.5261711180210114e-001 + <_> + + 0 -1 608 -2.4346606805920601e-003 + + -5.2965056896209717e-001 3.2590351998806000e-002 + <_> + + 0 -1 699 -3.6002553999423981e-002 + + -4.7963315248489380e-001 3.3382374793291092e-002 + <_> + + 0 -1 21 -9.9108107388019562e-003 + + -4.1830718517303467e-001 3.9141375571489334e-002 + <_> + + 0 -1 494 1.6194984316825867e-002 + + -8.5234634578227997e-002 2.3464871942996979e-001 + <_> + + 0 -1 58 -3.3725582063198090e-002 + + 2.3940484225749969e-001 -7.6999202370643616e-002 + <_> + + 0 -1 609 -1.8347065895795822e-002 + + 5.8518385887145996e-001 -3.3470980823040009e-002 + <_> + + 0 -1 365 -9.8516181111335754e-002 + + -3.6391365528106689e-001 4.9527972936630249e-002 + <_> + + 0 -1 394 -6.0063995420932770e-002 + + 3.6112886667251587e-001 -5.6244421750307083e-002 + <_> + + 0 -1 345 9.0678725391626358e-003 + + 4.0666222572326660e-002 -4.6414673328399658e-001 + <_> + + 0 -1 152 2.3706434294581413e-003 + + 6.3329577445983887e-002 -2.4620380997657776e-001 + <_> + + 0 -1 172 -6.6419452428817749e-002 + + 2.8512617945671082e-001 -5.8341175317764282e-002 + <_> + + 0 -1 575 4.2974375188350677e-002 + + -7.1375496685504913e-002 3.0780154466629028e-001 + <_> + + 0 -1 100 8.7192654609680176e-003 + + -7.9267904162406921e-002 2.0602001249790192e-001 + <_> + + 0 -1 269 1.2002356350421906e-002 + + -1.5233461558818817e-001 
1.1318613588809967e-001 + <_> + + 0 -1 98 -9.8644010722637177e-003 + + -7.8539586067199707e-001 1.8687475472688675e-002 + <_> + + 0 -1 290 -8.9075282216072083e-002 + + 4.6060782670974731e-001 -4.2248334735631943e-002 + <_> + + 0 -1 20 1.0566137731075287e-002 + + 3.0384514480829239e-002 -5.7432663440704346e-001 + <_> + + 0 -1 284 -1.0629332065582275e-001 + + -6.0937166213989258e-001 2.0774191245436668e-002 + <_> + + 0 -1 788 2.1692281588912010e-003 + + 2.3979913443326950e-002 -5.2453607320785522e-001 + <_> + + 0 -1 679 -1.1789994314312935e-003 + + -1.6677659749984741e-001 8.3988659083843231e-002 + <_> + + 0 -1 26 -6.6964447498321533e-002 + + 2.8124526143074036e-001 -5.2651099860668182e-002 + <_> + + 0 -1 356 -1.4276498556137085e-001 + + -4.4757640361785889e-001 3.4932579845190048e-002 + <_> + + 0 -1 225 -2.4635739624500275e-002 + + 2.6846516132354736e-001 -7.4156984686851501e-002 + <_> + + 0 -1 25 3.9562156796455383e-001 + + -8.6417727172374725e-002 1.8380576372146606e-001 + <_> + + 0 -1 217 5.2181005477905273e-001 + + 2.9292779043316841e-002 -6.1001229286193848e-001 + <_> + + 0 -1 305 -1.6573144495487213e-001 + + -6.5797531604766846e-001 1.7244184389710426e-002 + <_> + + 0 -1 386 -1.6044322401285172e-002 + + 4.1903367638587952e-001 -3.7789851427078247e-002 + <_> + + 0 -1 366 2.6809714734554291e-002 + + -8.2740567624568939e-002 1.7777737975120544e-001 + <_> + + 0 -1 336 7.8544206917285919e-003 + + -6.8523615598678589e-002 2.1773363649845123e-001 + <_> + + 0 -1 742 -3.5230373032391071e-003 + + 1.4932866394519806e-001 -1.0709155350923538e-001 + <_> + + 0 -1 41 1.5989519655704498e-002 + + 7.6693944633007050e-002 -1.7386263608932495e-001 + <_> + + 0 -1 715 -6.4930706284940243e-003 + + 1.5801398456096649e-001 -8.9987613260746002e-002 + <_> + + 0 -1 402 -6.8842992186546326e-003 + + -6.6933703422546387e-001 1.8489124253392220e-002 + <_> + + 0 -1 613 -5.5425488390028477e-003 + + -5.8793991804122925e-001 1.7602920532226563e-002 + <_> + + 0 -1 720 -1.4227415435016155e-002 + + 
1.9111973047256470e-001 -7.4273422360420227e-002 + <_> + + 0 -1 653 -6.1660851351916790e-003 + + 1.7060419917106628e-001 -1.0579483211040497e-001 + <_> + + 0 -1 379 -4.3110325932502747e-002 + + 4.1028141975402832e-001 -3.4564148634672165e-002 + <_> + + 0 -1 473 5.4760933853685856e-003 + + -1.1366824805736542e-001 1.2372239679098129e-001 + <_> + + 0 -1 358 -1.9582428503781557e-003 + + 2.4462001025676727e-001 -6.2438860535621643e-002 + <_> + + 0 -1 146 -9.3984082341194153e-002 + + -5.8154088258743286e-001 2.3986594751477242e-002 + <_> + + 0 -1 69 2.0285267382860184e-003 + + -1.1612447351217270e-001 1.2180737406015396e-001 + <_> + + 0 -1 46 -1.0831435024738312e-001 + + 3.1536707282066345e-001 -5.4184518754482269e-002 + <_> + + 0 -1 490 4.3249905109405518e-002 + + -6.1532370746135712e-002 2.4403128027915955e-001 + <_> + + 0 -1 739 -5.3873187862336636e-003 + + 2.9388564825057983e-001 -5.8327767997980118e-002 + <_> + + 0 -1 737 -2.1797083318233490e-002 + + -7.5672888755798340e-001 1.7756229266524315e-002 + <_> + + 0 -1 668 2.6498941704630852e-002 + + -4.1974004358053207e-002 3.5842666029930115e-001 + <_> + + 0 -1 812 -2.8153134509921074e-002 + + -7.5790113210678101e-001 2.0845755934715271e-002 + <_> + + 0 -1 168 2.7076231315732002e-002 + + 2.1633280441164970e-002 -5.1440018415451050e-001 + <_> + + 0 -1 126 2.8455932624638081e-003 + + 7.9507686197757721e-002 -1.8178141117095947e-001 + <_> + + 0 -1 19 7.8926710411906242e-003 + + 1.6891311854124069e-002 -7.8931826353073120e-001 + <_> + + 0 -1 403 9.6260979771614075e-003 + + 1.0690424591302872e-002 -8.5264998674392700e-001 + <_> + + 0 -1 286 -7.1314051747322083e-002 + + 3.9668402075767517e-001 -3.6018040031194687e-002 + <_> + + 0 -1 666 1.4971541240811348e-003 + + 9.4971261918544769e-002 -1.5711499750614166e-001 + <_> + + 0 -1 813 -3.8292277604341507e-002 + + 3.6354860663414001e-001 -3.7645917385816574e-002 + <_> + + 0 -1 565 -9.0788856148719788e-002 + + -7.2015196084976196e-001 2.0330978557467461e-002 + <_> + + 0 -1 221 
4.6655405312776566e-003 + + -4.0195610374212265e-002 3.6163648962974548e-001 + <_> + + 0 -1 236 -4.8079788684844971e-002 + + 5.3677225112915039e-001 -2.3413557559251785e-002 + <_> + + 0 -1 500 1.6154326498508453e-002 + + -8.4327124059200287e-002 1.9077171385288239e-001 + <_> + + 0 -1 662 -6.5981596708297729e-003 + + 2.8649520874023438e-001 -5.4097983986139297e-002 + <_> + + 0 -1 201 4.3116593733429909e-003 + + 2.6405530050396919e-002 -5.2418345212936401e-001 + <_> + + 0 -1 793 2.4752719327807426e-003 + + 1.9609335809946060e-002 -6.0011303424835205e-001 + <_> + + 0 -1 630 2.0701158791780472e-002 + + -4.2482525110244751e-002 3.3890983462333679e-001 + <_> + + 0 -1 445 3.2515443861484528e-002 + + -4.9479570239782333e-002 2.7387470006942749e-001 + <_> + + 0 -1 60 8.5999246221035719e-004 + + 1.5073892474174500e-001 -9.3416213989257813e-002 + <_> + + 0 -1 82 3.1756132841110229e-002 + + -9.4276405870914459e-002 2.0481915771961212e-001 + <_> + + 0 -1 304 4.5372098684310913e-002 + + -1.1695238947868347e-001 1.3142672181129456e-001 + <_> + + 0 -1 369 -1.4278095960617065e-001 + + 2.4391913414001465e-001 -6.1943054199218750e-002 + <_> + + 0 -1 248 7.0435162633657455e-003 + + -5.4324191063642502e-002 2.8498598933219910e-001 + <_> + + 0 -1 220 -3.1006825156509876e-003 + + -1.3665364682674408e-001 1.1120753735303879e-001 + <_> + + 0 -1 620 4.8597272485494614e-002 + + 1.5732062980532646e-002 -8.4663796424865723e-001 + <_> + + 0 -1 530 -1.0568272322416306e-002 + + -9.2514985799789429e-001 9.3551985919475555e-003 + <_> + + 0 -1 476 -3.0635483562946320e-002 + + -6.1497151851654053e-001 1.6442870721220970e-002 + <_> + + 0 -1 79 4.2528659105300903e-002 + + 2.4779593572020531e-002 -4.4173952937126160e-001 + <_> + + 0 -1 62 8.9158815145492554e-001 + + 2.8625806793570518e-002 -4.2071193456649780e-001 + <_> + + 0 -1 72 -2.3522457107901573e-002 + + -5.3869980573654175e-001 2.1518746390938759e-002 + + <_> + + <_> + 0 0 1 4 -1. + <_> + 0 1 1 2 2. + 0 + <_> + + <_> + 0 0 3 20 -1. 
+ <_> + 1 0 1 20 3. + 0 + <_> + + <_> + 0 0 2 4 -1. + <_> + 0 2 2 2 2. + 0 + <_> + + <_> + 0 0 2 6 -1. + <_> + 0 2 2 2 3. + 0 + <_> + + <_> + 0 0 4 4 -1. + <_> + 0 0 2 2 2. + <_> + 2 2 2 2 2. + 0 + <_> + + <_> + 0 0 2 22 -1. + <_> + 0 11 2 11 2. + 0 + <_> + + <_> + 0 0 9 24 -1. + <_> + 3 8 3 8 9. + 0 + <_> + + <_> + 0 0 8 6 -1. + <_> + 4 0 4 6 2. + 0 + <_> + + <_> + 0 0 10 6 -1. + <_> + 0 0 5 3 2. + <_> + 5 3 5 3 2. + 0 + <_> + + <_> + 0 0 15 12 -1. + <_> + 5 4 5 4 9. + 0 + <_> + + <_> + 0 0 12 6 -1. + <_> + 6 0 6 6 2. + 0 + <_> + + <_> + 0 0 14 4 -1. + <_> + 7 0 7 4 2. + 0 + <_> + + <_> + 0 0 8 9 -1. + <_> + 0 3 8 3 3. + 0 + <_> + + <_> + 0 0 11 4 -1. + <_> + 0 1 11 2 2. + 0 + <_> + + <_> + 0 1 1 2 -1. + <_> + 0 2 1 1 2. + 0 + <_> + + <_> + 0 1 3 3 -1. + <_> + 1 2 1 1 9. + 0 + <_> + + <_> + 0 1 3 9 -1. + <_> + 1 4 1 3 9. + 0 + <_> + + <_> + 0 1 3 12 -1. + <_> + 1 5 1 4 9. + 0 + <_> + + <_> + 0 1 2 21 -1. + <_> + 1 1 1 21 2. + 0 + <_> + + <_> + 0 1 2 3 -1. + <_> + 0 2 2 1 3. + 0 + <_> + + <_> + 0 1 2 4 -1. + <_> + 0 3 2 2 2. + 0 + <_> + + <_> + 0 1 2 9 -1. + <_> + 0 4 2 3 3. + 0 + <_> + + <_> + 0 1 3 2 -1. + <_> + 0 2 3 1 2. + 0 + <_> + + <_> + 0 1 6 4 -1. + <_> + 0 1 3 2 2. + <_> + 3 3 3 2 2. + 0 + <_> + + <_> + 0 1 15 6 -1. + <_> + 5 3 5 2 9. + 0 + <_> + + <_> + 0 1 15 9 -1. + <_> + 5 4 5 3 9. + 0 + <_> + + <_> + 0 1 6 20 -1. + <_> + 0 6 6 10 2. + 0 + <_> + + <_> + 0 1 7 2 -1. + <_> + 0 2 7 1 2. + 0 + <_> + + <_> + 0 1 15 24 -1. + <_> + 0 7 15 12 2. + 0 + <_> + + <_> + 0 2 4 19 -1. + <_> + 1 2 2 19 2. + 0 + <_> + + <_> + 0 2 2 2 -1. + <_> + 0 3 2 1 2. + 0 + <_> + + <_> + 0 2 2 8 -1. + <_> + 0 4 2 4 2. + 0 + <_> + + <_> + 0 2 6 4 -1. + <_> + 0 2 3 2 2. + <_> + 3 4 3 2 2. + 0 + <_> + + <_> + 0 2 12 15 -1. + <_> + 0 7 12 5 3. + 0 + <_> + + <_> + 0 2 15 15 -1. + <_> + 0 7 15 5 3. + 0 + <_> + + <_> + 0 3 3 4 -1. + <_> + 1 3 1 4 3. + 0 + <_> + + <_> + 0 3 3 16 -1. + <_> + 1 3 1 16 3. + 0 + <_> + + <_> + 0 3 4 17 -1. + <_> + 1 3 2 17 2. + 0 + <_> + + <_> + 0 3 4 19 -1. 
+ <_> + 1 3 2 19 2. + 0 + <_> + + <_> + 0 3 2 6 -1. + <_> + 0 5 2 2 3. + 0 + <_> + + <_> + 0 3 8 4 -1. + <_> + 0 3 4 2 2. + <_> + 4 5 4 2 2. + 0 + <_> + + <_> + 0 3 13 6 -1. + <_> + 0 5 13 2 3. + 0 + <_> + + <_> + 0 4 2 15 -1. + <_> + 1 4 1 15 2. + 0 + <_> + + <_> + 0 4 15 15 -1. + <_> + 5 9 5 5 9. + 0 + <_> + + <_> + 0 4 12 14 -1. + <_> + 6 4 6 14 2. + 0 + <_> + + <_> + 0 5 2 13 -1. + <_> + 1 5 1 13 2. + 0 + <_> + + <_> + 0 5 8 8 -1. + <_> + 2 5 4 8 2. + 0 + <_> + + <_> + 0 5 15 15 -1. + <_> + 5 10 5 5 9. + 0 + <_> + + <_> + 0 6 1 4 -1. + <_> + 0 7 1 2 2. + 0 + <_> + + <_> + 0 6 4 12 -1. + <_> + 0 9 4 6 2. + 0 + <_> + + <_> + 0 6 15 12 -1. + <_> + 5 10 5 4 9. + 0 + <_> + + <_> + 0 6 14 6 -1. + <_> + 0 6 7 3 2. + <_> + 7 9 7 3 2. + 0 + <_> + + <_> + 0 7 3 13 -1. + <_> + 1 7 1 13 3. + 0 + <_> + + <_> + 0 7 8 1 -1. + <_> + 2 7 4 1 2. + 0 + <_> + + <_> + 0 7 4 9 -1. + <_> + 2 7 2 9 2. + 0 + <_> + + <_> + 0 7 3 4 -1. + <_> + 0 9 3 2 2. + 0 + <_> + + <_> + 0 7 6 6 -1. + <_> + 0 10 6 3 2. + 0 + <_> + + <_> + 0 8 1 4 -1. + <_> + 0 9 1 2 2. + 0 + <_> + + <_> + 0 8 4 11 -1. + <_> + 1 8 2 11 2. + 0 + <_> + + <_> + 0 8 14 2 -1. + <_> + 0 8 7 1 2. + <_> + 7 9 7 1 2. + 0 + <_> + + <_> + 0 9 4 15 -1. + <_> + 2 9 2 15 2. + 0 + <_> + + <_> + 0 9 8 4 -1. + <_> + 4 9 4 4 2. + 0 + <_> + + <_> + 0 9 15 15 -1. + <_> + 5 14 5 5 9. + 0 + <_> + + <_> + 0 10 3 12 -1. + <_> + 1 14 1 4 9. + 0 + <_> + + <_> + 0 10 12 6 -1. + <_> + 6 10 6 6 2. + 0 + <_> + + <_> + 0 11 15 9 -1. + <_> + 0 14 15 3 3. + 0 + <_> + + <_> + 0 12 10 1 -1. + <_> + 5 12 5 1 2. + 0 + <_> + + <_> + 0 13 4 7 -1. + <_> + 1 13 2 7 2. + 0 + <_> + + <_> + 0 13 9 3 -1. + <_> + 3 13 3 3 3. + 0 + <_> + + <_> + 0 13 3 12 -1. + <_> + 0 16 3 6 2. + 0 + <_> + + <_> + 0 13 14 9 -1. + <_> + 7 13 7 9 2. + 0 + <_> + + <_> + 0 14 1 4 -1. + <_> + 0 15 1 2 2. + 0 + <_> + + <_> + 0 14 12 1 -1. + <_> + 3 14 6 1 2. + 0 + <_> + + <_> + 0 14 14 6 -1. + <_> + 0 16 14 2 3. + 0 + <_> + + <_> + 0 15 4 4 -1. + <_> + 1 15 2 4 2. 
+ 0 + <_> + + <_> + 0 15 3 7 -1. + <_> + 1 15 1 7 3. + 0 + <_> + + <_> + 0 15 12 10 -1. + <_> + 3 15 6 10 2. + 0 + <_> + + <_> + 0 16 3 6 -1. + <_> + 1 18 1 2 9. + 0 + <_> + + <_> + 0 16 3 6 -1. + <_> + 1 16 1 6 3. + 0 + <_> + + <_> + 0 16 8 2 -1. + <_> + 2 16 4 2 2. + 0 + <_> + + <_> + 0 16 12 3 -1. + <_> + 3 16 6 3 2. + 0 + <_> + + <_> + 0 16 12 5 -1. + <_> + 6 16 6 5 2. + 0 + <_> + + <_> + 0 17 8 3 -1. + <_> + 2 17 4 3 2. + 0 + <_> + + <_> + 0 17 9 3 -1. + <_> + 3 18 3 1 9. + 0 + <_> + + <_> + 0 17 9 3 -1. + <_> + 3 17 3 3 3. + 0 + <_> + + <_> + 0 17 8 8 -1. + <_> + 4 17 4 8 2. + 0 + <_> + + <_> + 0 18 8 2 -1. + <_> + 2 18 4 2 2. + 0 + <_> + + <_> + 0 18 8 3 -1. + <_> + 2 18 4 3 2. + 0 + <_> + + <_> + 0 18 9 2 -1. + <_> + 3 18 3 2 3. + 0 + <_> + + <_> + 0 18 5 4 -1. + <_> + 0 19 5 2 2. + 0 + <_> + + <_> + 0 18 10 4 -1. + <_> + 0 18 5 2 2. + <_> + 5 20 5 2 2. + 0 + <_> + + <_> + 0 18 15 4 -1. + <_> + 5 18 5 4 3. + 0 + <_> + + <_> + 0 18 12 7 -1. + <_> + 6 18 6 7 2. + 0 + <_> + + <_> + 0 19 9 3 -1. + <_> + 3 20 3 1 9. + 0 + <_> + + <_> + 0 19 8 6 -1. + <_> + 4 19 4 6 2. + 0 + <_> + + <_> + 0 20 8 1 -1. + <_> + 2 20 4 1 2. + 0 + <_> + + <_> + 0 20 15 2 -1. + <_> + 5 20 5 2 3. + 0 + <_> + + <_> + 0 20 14 5 -1. + <_> + 7 20 7 5 2. + 0 + <_> + + <_> + 0 21 3 2 -1. + <_> + 1 21 1 2 3. + 0 + <_> + + <_> + 0 21 12 4 -1. + <_> + 0 22 12 2 2. + 0 + <_> + + <_> + 0 21 13 3 -1. + <_> + 0 22 13 1 3. + 0 + <_> + + <_> + 0 21 14 4 -1. + <_> + 0 23 14 2 2. + 0 + <_> + + <_> + 0 22 9 3 -1. + <_> + 3 22 3 3 3. + 0 + <_> + + <_> + 0 22 11 3 -1. + <_> + 0 23 11 1 3. + 0 + <_> + + <_> + 0 22 13 2 -1. + <_> + 0 23 13 1 2. + 0 + <_> + + <_> + 0 22 15 3 -1. + <_> + 0 23 15 1 3. + 0 + <_> + + <_> + 0 23 6 2 -1. + <_> + 3 23 3 2 2. + 0 + <_> + + <_> + 1 0 1 2 -1. + <_> + 1 1 1 1 2. + 0 + <_> + + <_> + 1 0 1 4 -1. + <_> + 1 1 1 2 2. + 0 + <_> + + <_> + 1 0 3 3 -1. + <_> + 2 1 1 1 9. + 0 + <_> + + <_> + 1 0 1 9 -1. + <_> + 1 3 1 3 3. + 0 + <_> + + <_> + 1 0 1 12 -1. + <_> + 1 6 1 6 2. 
+ 0 + <_> + + <_> + 1 0 1 22 -1. + <_> + 1 11 1 11 2. + 0 + <_> + + <_> + 1 0 2 2 -1. + <_> + 1 1 2 1 2. + 0 + <_> + + <_> + 1 0 4 6 -1. + <_> + 1 0 2 3 2. + <_> + 3 3 2 3 2. + 0 + <_> + + <_> + 1 0 8 5 -1. + <_> + 3 0 4 5 2. + 0 + <_> + + <_> + 1 0 6 21 -1. + <_> + 3 7 2 7 9. + 0 + <_> + + <_> + 1 0 12 1 -1. + <_> + 4 0 6 1 2. + 0 + <_> + + <_> + 1 0 9 4 -1. + <_> + 4 0 3 4 3. + 0 + <_> + + <_> + 1 0 8 1 -1. + <_> + 5 0 4 1 2. + 0 + <_> + + <_> + 1 0 8 12 -1. + <_> + 1 0 4 6 2. + <_> + 5 6 4 6 2. + 0 + <_> + + <_> + 1 0 14 6 -1. + <_> + 8 0 7 6 2. + 0 + <_> + + <_> + 1 0 10 4 -1. + <_> + 1 1 10 2 2. + 0 + <_> + + <_> + 1 1 1 3 -1. + <_> + 1 2 1 1 3. + 0 + <_> + + <_> + 1 1 1 8 -1. + <_> + 1 3 1 4 2. + 0 + <_> + + <_> + 1 1 1 12 -1. + <_> + 1 5 1 4 3. + 0 + <_> + + <_> + 1 1 1 14 -1. + <_> + 1 8 1 7 2. + 0 + <_> + + <_> + 1 1 1 21 -1. + <_> + 1 8 1 7 3. + 0 + <_> + + <_> + 1 1 1 16 -1. + <_> + 1 9 1 8 2. + 0 + <_> + + <_> + 1 1 1 20 -1. + <_> + 1 11 1 10 2. + 0 + <_> + + <_> + 1 1 14 2 -1. + <_> + 1 1 7 1 2. + <_> + 8 2 7 1 2. + 0 + <_> + + <_> + 1 1 14 4 -1. + <_> + 1 2 14 2 2. + 0 + <_> + + <_> + 1 2 1 6 -1. + <_> + 1 4 1 2 3. + 0 + <_> + + <_> + 1 2 1 9 -1. + <_> + 1 5 1 3 3. + 0 + <_> + + <_> + 1 2 1 12 -1. + <_> + 1 8 1 6 2. + 0 + <_> + + <_> + 1 2 1 21 -1. + <_> + 1 9 1 7 3. + 0 + <_> + + <_> + 1 2 8 1 -1. + <_> + 3 4 4 1 2. + 1 + <_> + + <_> + 1 2 4 5 -1. + <_> + 3 2 2 5 2. + 0 + <_> + + <_> + 1 2 4 20 -1. + <_> + 3 2 2 20 2. + 0 + <_> + + <_> + 1 3 1 8 -1. + <_> + 1 5 1 4 2. + 0 + <_> + + <_> + 1 3 1 12 -1. + <_> + 1 7 1 4 3. + 0 + <_> + + <_> + 1 3 1 10 -1. + <_> + 1 8 1 5 2. + 0 + <_> + + <_> + 1 3 12 3 -1. + <_> + 5 4 4 1 9. + 0 + <_> + + <_> + 1 3 12 22 -1. + <_> + 5 3 4 22 3. + 0 + <_> + + <_> + 1 3 14 1 -1. + <_> + 8 3 7 1 2. + 0 + <_> + + <_> + 1 4 4 19 -1. + <_> + 3 4 2 19 2. + 0 + <_> + + <_> + 1 5 3 15 -1. + <_> + 2 10 1 5 9. + 0 + <_> + + <_> + 1 5 14 1 -1. + <_> + 8 5 7 1 2. + 0 + <_> + + <_> + 1 7 1 4 -1. + <_> + 1 9 1 2 2. 
+ 0 + <_> + + <_> + 1 8 1 14 -1. + <_> + 1 15 1 7 2. + 0 + <_> + + <_> + 1 8 4 9 -1. + <_> + 1 11 4 3 3. + 0 + <_> + + <_> + 1 8 13 8 -1. + <_> + 1 12 13 4 2. + 0 + <_> + + <_> + 1 9 1 12 -1. + <_> + 1 15 1 6 2. + 0 + <_> + + <_> + 1 9 12 6 -1. + <_> + 1 12 12 3 2. + 0 + <_> + + <_> + 1 9 13 6 -1. + <_> + 1 12 13 3 2. + 0 + <_> + + <_> + 1 10 1 12 -1. + <_> + 1 14 1 4 3. + 0 + <_> + + <_> + 1 10 2 6 -1. + <_> + 1 12 2 2 3. + 0 + <_> + + <_> + 1 10 12 1 -1. + <_> + 4 13 6 1 2. + 1 + <_> + + <_> + 1 10 12 1 -1. + <_> + 1 10 6 1 2. + 1 + <_> + + <_> + 1 11 1 6 -1. + <_> + 1 13 1 2 3. + 0 + <_> + + <_> + 1 11 1 8 -1. + <_> + 1 13 1 4 2. + 0 + <_> + + <_> + 1 11 8 13 -1. + <_> + 3 11 4 13 2. + 0 + <_> + + <_> + 1 12 1 8 -1. + <_> + 1 16 1 4 2. + 0 + <_> + + <_> + 1 12 12 1 -1. + <_> + 4 12 6 1 2. + 0 + <_> + + <_> + 1 13 1 8 -1. + <_> + 1 15 1 4 2. + 0 + <_> + + <_> + 1 13 2 4 -1. + <_> + 1 15 2 2 2. + 0 + <_> + + <_> + 1 13 12 1 -1. + <_> + 4 13 6 1 2. + 0 + <_> + + <_> + 1 15 4 7 -1. + <_> + 3 15 2 7 2. + 0 + <_> + + <_> + 1 15 10 1 -1. + <_> + 6 15 5 1 2. + 0 + <_> + + <_> + 1 16 4 5 -1. + <_> + 2 16 2 5 2. + 0 + <_> + + <_> + 1 16 6 6 -1. + <_> + 3 18 2 2 9. + 0 + <_> + + <_> + 1 16 8 3 -1. + <_> + 3 16 4 3 2. + 0 + <_> + + <_> + 1 16 8 7 -1. + <_> + 3 16 4 7 2. + 0 + <_> + + <_> + 1 17 8 3 -1. + <_> + 3 17 4 3 2. + 0 + <_> + + <_> + 1 17 8 4 -1. + <_> + 3 17 4 4 2. + 0 + <_> + + <_> + 1 17 4 5 -1. + <_> + 3 17 2 5 2. + 0 + <_> + + <_> + 1 18 4 2 -1. + <_> + 3 18 2 2 2. + 0 + <_> + + <_> + 1 18 8 2 -1. + <_> + 3 18 4 2 2. + 0 + <_> + + <_> + 1 18 8 3 -1. + <_> + 3 18 4 3 2. + 0 + <_> + + <_> + 1 19 4 6 -1. + <_> + 1 19 2 3 2. + <_> + 3 22 2 3 2. + 0 + <_> + + <_> + 1 19 8 1 -1. + <_> + 5 19 4 1 2. + 0 + <_> + + <_> + 1 20 4 1 -1. + <_> + 1 20 2 1 2. + 1 + <_> + + <_> + 1 20 8 5 -1. + <_> + 3 20 4 5 2. + 0 + <_> + + <_> + 1 20 12 2 -1. + <_> + 4 20 6 2 2. + 0 + <_> + + <_> + 1 20 8 4 -1. + <_> + 1 20 4 2 2. + <_> + 5 22 4 2 2. + 0 + <_> + + <_> + 1 20 14 1 -1. 
+ <_> + 8 20 7 1 2. + 0 + <_> + + <_> + 1 20 14 2 -1. + <_> + 1 20 7 1 2. + <_> + 8 21 7 1 2. + 0 + <_> + + <_> + 1 20 14 5 -1. + <_> + 8 20 7 5 2. + 0 + <_> + + <_> + 1 21 12 1 -1. + <_> + 5 21 4 1 3. + 0 + <_> + + <_> + 1 21 8 4 -1. + <_> + 1 21 4 2 2. + <_> + 5 23 4 2 2. + 0 + <_> + + <_> + 1 21 12 4 -1. + <_> + 1 22 12 2 2. + 0 + <_> + + <_> + 1 21 14 4 -1. + <_> + 1 23 14 2 2. + 0 + <_> + + <_> + 1 22 8 3 -1. + <_> + 3 22 4 3 2. + 0 + <_> + + <_> + 1 22 5 3 -1. + <_> + 1 23 5 1 3. + 0 + <_> + + <_> + 1 22 14 2 -1. + <_> + 8 22 7 2 2. + 0 + <_> + + <_> + 1 23 6 2 -1. + <_> + 3 23 2 2 3. + 0 + <_> + + <_> + 1 23 8 2 -1. + <_> + 5 23 4 2 2. + 0 + <_> + + <_> + 1 23 12 2 -1. + <_> + 7 23 6 2 2. + 0 + <_> + + <_> + 1 24 6 1 -1. + <_> + 4 24 3 1 2. + 0 + <_> + + <_> + 1 24 12 1 -1. + <_> + 4 24 6 1 2. + 0 + <_> + + <_> + 1 24 12 1 -1. + <_> + 5 24 4 1 3. + 0 + <_> + + <_> + 2 0 3 2 -1. + <_> + 3 0 1 2 3. + 0 + <_> + + <_> + 2 0 4 1 -1. + <_> + 4 0 2 1 2. + 0 + <_> + + <_> + 2 0 8 1 -1. + <_> + 4 0 4 1 2. + 0 + <_> + + <_> + 2 0 3 14 -1. + <_> + 2 7 3 7 2. + 0 + <_> + + <_> + 2 0 12 1 -1. + <_> + 6 0 4 1 3. + 0 + <_> + + <_> + 2 0 8 12 -1. + <_> + 2 0 4 6 2. + <_> + 6 6 4 6 2. + 0 + <_> + + <_> + 2 0 5 2 -1. + <_> + 2 1 5 1 2. + 0 + <_> + + <_> + 2 0 5 24 -1. + <_> + 2 6 5 12 2. + 0 + <_> + + <_> + 2 0 6 6 -1. + <_> + 2 3 6 3 2. + 0 + <_> + + <_> + 2 0 7 4 -1. + <_> + 2 2 7 2 2. + 0 + <_> + + <_> + 2 0 12 4 -1. + <_> + 2 2 12 2 2. + 0 + <_> + + <_> + 2 0 13 4 -1. + <_> + 2 1 13 2 2. + 0 + <_> + + <_> + 2 1 6 1 -1. + <_> + 5 1 3 1 2. + 0 + <_> + + <_> + 2 1 10 4 -1. + <_> + 2 2 10 2 2. + 0 + <_> + + <_> + 2 2 2 5 -1. + <_> + 3 2 1 5 2. + 0 + <_> + + <_> + 2 2 2 21 -1. + <_> + 3 2 1 21 2. + 0 + <_> + + <_> + 2 2 12 9 -1. + <_> + 6 5 4 3 9. + 0 + <_> + + <_> + 2 3 12 3 -1. + <_> + 5 3 6 3 2. + 0 + <_> + + <_> + 2 3 12 3 -1. + <_> + 6 4 4 1 9. + 0 + <_> + + <_> + 2 3 5 12 -1. + <_> + 2 7 5 4 3. + 0 + <_> + + <_> + 2 4 4 2 -1. + <_> + 2 4 2 1 2. + <_> + 4 5 2 1 2. 
+ 0 + <_> + + <_> + 2 4 6 3 -1. + <_> + 4 5 2 1 9. + 0 + <_> + + <_> + 2 4 6 15 -1. + <_> + 4 9 2 5 9. + 0 + <_> + + <_> + 2 4 12 2 -1. + <_> + 5 4 6 2 2. + 0 + <_> + + <_> + 2 5 12 2 -1. + <_> + 5 5 6 2 2. + 0 + <_> + + <_> + 2 5 7 12 -1. + <_> + 2 11 7 6 2. + 0 + <_> + + <_> + 2 6 2 3 -1. + <_> + 3 6 1 3 2. + 0 + <_> + + <_> + 2 6 1 12 -1. + <_> + 2 9 1 6 2. + 0 + <_> + + <_> + 2 6 8 2 -1. + <_> + 4 8 4 2 2. + 1 + <_> + + <_> + 2 6 6 12 -1. + <_> + 4 10 2 4 9. + 0 + <_> + + <_> + 2 6 12 10 -1. + <_> + 2 11 12 5 2. + 0 + <_> + + <_> + 2 8 4 6 -1. + <_> + 2 11 4 3 2. + 0 + <_> + + <_> + 2 8 11 12 -1. + <_> + 2 12 11 4 3. + 0 + <_> + + <_> + 2 9 12 2 -1. + <_> + 5 12 6 2 2. + 1 + <_> + + <_> + 2 10 11 6 -1. + <_> + 2 13 11 3 2. + 0 + <_> + + <_> + 2 11 4 5 -1. + <_> + 4 11 2 5 2. + 0 + <_> + + <_> + 2 11 12 2 -1. + <_> + 5 11 6 2 2. + 0 + <_> + + <_> + 2 12 12 1 -1. + <_> + 5 12 6 1 2. + 0 + <_> + + <_> + 2 12 13 9 -1. + <_> + 2 15 13 3 3. + 0 + <_> + + <_> + 2 14 2 11 -1. + <_> + 3 14 1 11 2. + 0 + <_> + + <_> + 2 16 4 3 -1. + <_> + 4 16 2 3 2. + 0 + <_> + + <_> + 2 16 12 6 -1. + <_> + 6 18 4 2 9. + 0 + <_> + + <_> + 2 17 4 8 -1. + <_> + 2 17 2 4 2. + <_> + 4 21 2 4 2. + 0 + <_> + + <_> + 2 18 2 1 -1. + <_> + 3 18 1 1 2. + 0 + <_> + + <_> + 2 18 6 2 -1. + <_> + 4 18 2 2 3. + 0 + <_> + + <_> + 2 18 13 6 -1. + <_> + 2 21 13 3 2. + 0 + <_> + + <_> + 2 19 6 4 -1. + <_> + 2 19 3 2 2. + <_> + 5 21 3 2 2. + 0 + <_> + + <_> + 2 20 4 4 -1. + <_> + 2 20 2 2 2. + <_> + 4 22 2 2 2. + 0 + <_> + + <_> + 2 20 6 1 -1. + <_> + 5 20 3 1 2. + 0 + <_> + + <_> + 2 20 12 1 -1. + <_> + 5 20 6 1 2. + 0 + <_> + + <_> + 2 20 12 3 -1. + <_> + 5 20 6 3 2. + 0 + <_> + + <_> + 2 20 12 5 -1. + <_> + 5 20 6 5 2. + 0 + <_> + + <_> + 2 21 12 1 -1. + <_> + 6 21 4 1 3. + 0 + <_> + + <_> + 2 21 9 3 -1. + <_> + 2 22 9 1 3. + 0 + <_> + + <_> + 2 21 9 4 -1. + <_> + 2 23 9 2 2. + 0 + <_> + + <_> + 2 21 13 4 -1. + <_> + 2 22 13 2 2. + 0 + <_> + + <_> + 2 22 12 2 -1. + <_> + 8 22 6 2 2. 
+ 0 + <_> + + <_> + 2 23 4 2 -1. + <_> + 3 23 2 2 2. + 0 + <_> + + <_> + 2 23 6 1 -1. + <_> + 4 23 2 1 3. + 0 + <_> + + <_> + 2 24 6 1 -1. + <_> + 5 24 3 1 2. + 0 + <_> + + <_> + 2 24 12 1 -1. + <_> + 6 24 4 1 3. + 0 + <_> + + <_> + 3 0 3 2 -1. + <_> + 4 0 1 2 3. + 0 + <_> + + <_> + 3 0 4 2 -1. + <_> + 4 1 2 2 2. + 1 + <_> + + <_> + 3 0 4 1 -1. + <_> + 5 0 2 1 2. + 0 + <_> + + <_> + 3 0 2 4 -1. + <_> + 3 2 2 2 2. + 0 + <_> + + <_> + 3 0 6 2 -1. + <_> + 5 2 2 2 3. + 1 + <_> + + <_> + 3 0 9 1 -1. + <_> + 6 0 3 1 3. + 0 + <_> + + <_> + 3 0 3 24 -1. + <_> + 3 6 3 12 2. + 0 + <_> + + <_> + 3 0 4 9 -1. + <_> + 3 3 4 3 3. + 0 + <_> + + <_> + 3 0 12 5 -1. + <_> + 9 0 6 5 2. + 0 + <_> + + <_> + 3 0 12 6 -1. + <_> + 9 0 6 6 2. + 0 + <_> + + <_> + 3 0 8 6 -1. + <_> + 3 3 8 3 2. + 0 + <_> + + <_> + 3 0 9 6 -1. + <_> + 3 2 9 2 3. + 0 + <_> + + <_> + 3 0 10 18 -1. + <_> + 3 6 10 6 3. + 0 + <_> + + <_> + 3 0 12 24 -1. + <_> + 3 6 12 12 2. + 0 + <_> + + <_> + 3 1 4 1 -1. + <_> + 4 1 2 1 2. + 0 + <_> + + <_> + 3 1 4 2 -1. + <_> + 4 2 2 2 2. + 1 + <_> + + <_> + 3 1 8 1 -1. + <_> + 7 1 4 1 2. + 0 + <_> + + <_> + 3 1 5 4 -1. + <_> + 3 3 5 2 2. + 0 + <_> + + <_> + 3 1 6 20 -1. + <_> + 3 11 6 10 2. + 0 + <_> + + <_> + 3 2 9 21 -1. + <_> + 6 9 3 7 9. + 0 + <_> + + <_> + 3 2 6 18 -1. + <_> + 3 2 3 9 2. + <_> + 6 11 3 9 2. + 0 + <_> + + <_> + 3 2 6 20 -1. + <_> + 3 2 3 10 2. + <_> + 6 12 3 10 2. + 0 + <_> + + <_> + 3 2 12 4 -1. + <_> + 9 2 6 4 2. + 0 + <_> + + <_> + 3 2 9 4 -1. + <_> + 3 3 9 2 2. + 0 + <_> + + <_> + 3 2 9 4 -1. + <_> + 3 4 9 2 2. + 0 + <_> + + <_> + 3 2 9 15 -1. + <_> + 3 7 9 5 3. + 0 + <_> + + <_> + 3 3 12 3 -1. + <_> + 6 3 6 3 2. + 0 + <_> + + <_> + 3 3 9 9 -1. + <_> + 6 6 3 3 9. + 0 + <_> + + <_> + 3 3 6 14 -1. + <_> + 3 3 3 7 2. + <_> + 6 10 3 7 2. + 0 + <_> + + <_> + 3 3 8 16 -1. + <_> + 3 7 8 8 2. + 0 + <_> + + <_> + 3 4 2 2 -1. + <_> + 3 5 2 1 2. + 0 + <_> + + <_> + 3 4 9 9 -1. + <_> + 6 7 3 3 9. + 0 + <_> + + <_> + 3 4 12 15 -1. + <_> + 7 9 4 5 9. 
+ 0 + <_> + + <_> + 3 4 8 3 -1. + <_> + 3 5 8 1 3. + 0 + <_> + + <_> + 3 5 12 3 -1. + <_> + 9 5 6 3 2. + 0 + <_> + + <_> + 3 6 8 2 -1. + <_> + 5 6 4 2 2. + 0 + <_> + + <_> + 3 6 9 10 -1. + <_> + 3 11 9 5 2. + 0 + <_> + + <_> + 3 7 12 1 -1. + <_> + 6 10 6 1 2. + 1 + <_> + + <_> + 3 7 6 12 -1. + <_> + 3 7 3 6 2. + <_> + 6 13 3 6 2. + 0 + <_> + + <_> + 3 7 12 3 -1. + <_> + 7 7 4 3 3. + 0 + <_> + + <_> + 3 7 4 8 -1. + <_> + 3 11 4 4 2. + 0 + <_> + + <_> + 3 7 12 18 -1. + <_> + 9 7 6 18 2. + 0 + <_> + + <_> + 3 8 12 2 -1. + <_> + 6 11 6 2 2. + 1 + <_> + + <_> + 3 8 9 12 -1. + <_> + 3 14 9 6 2. + 0 + <_> + + <_> + 3 8 11 8 -1. + <_> + 3 12 11 4 2. + 0 + <_> + + <_> + 3 9 6 8 -1. + <_> + 3 9 3 4 2. + <_> + 6 13 3 4 2. + 0 + <_> + + <_> + 3 9 12 8 -1. + <_> + 6 9 6 8 2. + 0 + <_> + + <_> + 3 9 9 14 -1. + <_> + 3 16 9 7 2. + 0 + <_> + + <_> + 3 10 6 12 -1. + <_> + 5 14 2 4 9. + 0 + <_> + + <_> + 3 10 9 12 -1. + <_> + 6 14 3 4 9. + 0 + <_> + + <_> + 3 10 8 10 -1. + <_> + 3 15 8 5 2. + 0 + <_> + + <_> + 3 11 9 9 -1. + <_> + 6 14 3 3 9. + 0 + <_> + + <_> + 3 11 9 12 -1. + <_> + 6 15 3 4 9. + 0 + <_> + + <_> + 3 11 5 2 -1. + <_> + 3 11 5 1 2. + 1 + <_> + + <_> + 3 12 6 2 -1. + <_> + 3 12 3 1 2. + <_> + 6 13 3 1 2. + 0 + <_> + + <_> + 3 13 6 5 -1. + <_> + 5 13 2 5 3. + 0 + <_> + + <_> + 3 15 2 2 -1. + <_> + 3 15 1 2 2. + 1 + <_> + + <_> + 3 15 12 2 -1. + <_> + 6 15 6 2 2. + 0 + <_> + + <_> + 3 15 12 5 -1. + <_> + 6 15 6 5 2. + 0 + <_> + + <_> + 3 16 4 4 -1. + <_> + 4 16 2 4 2. + 0 + <_> + + <_> + 3 16 12 1 -1. + <_> + 6 16 6 1 2. + 0 + <_> + + <_> + 3 16 9 6 -1. + <_> + 6 18 3 2 9. + 0 + <_> + + <_> + 3 17 2 4 -1. + <_> + 4 17 1 4 2. + 0 + <_> + + <_> + 3 17 4 4 -1. + <_> + 3 17 2 2 2. + <_> + 5 19 2 2 2. + 0 + <_> + + <_> + 3 17 12 2 -1. + <_> + 6 17 6 2 2. + 0 + <_> + + <_> + 3 17 9 8 -1. + <_> + 3 21 9 4 2. + 0 + <_> + + <_> + 3 19 2 2 -1. + <_> + 4 19 1 2 2. + 0 + <_> + + <_> + 3 20 5 3 -1. + <_> + 3 21 5 1 3. + 0 + <_> + + <_> + 3 20 9 3 -1. + <_> + 3 21 9 1 3. 
+ 0 + <_> + + <_> + 3 20 12 3 -1. + <_> + 3 21 12 1 3. + 0 + <_> + + <_> + 3 21 5 3 -1. + <_> + 3 22 5 1 3. + 0 + <_> + + <_> + 3 21 11 3 -1. + <_> + 3 22 11 1 3. + 0 + <_> + + <_> + 3 22 3 3 -1. + <_> + 4 22 1 3 3. + 0 + <_> + + <_> + 3 22 9 2 -1. + <_> + 3 23 9 1 2. + 0 + <_> + + <_> + 3 22 10 3 -1. + <_> + 3 23 10 1 3. + 0 + <_> + + <_> + 3 22 12 3 -1. + <_> + 3 23 12 1 3. + 0 + <_> + + <_> + 3 23 4 1 -1. + <_> + 4 23 2 1 2. + 0 + <_> + + <_> + 3 23 6 1 -1. + <_> + 5 23 2 1 3. + 0 + <_> + + <_> + 3 23 10 2 -1. + <_> + 3 24 10 1 2. + 0 + <_> + + <_> + 3 24 3 1 -1. + <_> + 4 24 1 1 3. + 0 + <_> + + <_> + 3 24 4 1 -1. + <_> + 4 24 2 1 2. + 0 + <_> + + <_> + 3 24 6 1 -1. + <_> + 6 24 3 1 2. + 0 + <_> + + <_> + 3 24 9 1 -1. + <_> + 6 24 3 1 3. + 0 + <_> + + <_> + 3 24 12 1 -1. + <_> + 6 24 6 1 2. + 0 + <_> + + <_> + 4 0 3 3 -1. + <_> + 5 0 1 3 3. + 0 + <_> + + <_> + 4 0 4 3 -1. + <_> + 5 1 2 3 2. + 1 + <_> + + <_> + 4 0 6 4 -1. + <_> + 6 2 2 4 3. + 1 + <_> + + <_> + 4 0 2 24 -1. + <_> + 4 6 2 12 2. + 0 + <_> + + <_> + 4 0 4 12 -1. + <_> + 4 0 2 6 2. + <_> + 6 6 2 6 2. + 0 + <_> + + <_> + 4 0 3 24 -1. + <_> + 4 6 3 12 2. + 0 + <_> + + <_> + 4 0 6 6 -1. + <_> + 4 2 6 2 3. + 0 + <_> + + <_> + 4 0 7 3 -1. + <_> + 4 1 7 1 3. + 0 + <_> + + <_> + 4 0 7 6 -1. + <_> + 4 2 7 2 3. + 0 + <_> + + <_> + 4 0 8 6 -1. + <_> + 4 3 8 3 2. + 0 + <_> + + <_> + 4 0 8 18 -1. + <_> + 4 6 8 6 3. + 0 + <_> + + <_> + 4 0 9 18 -1. + <_> + 4 6 9 6 3. + 0 + <_> + + <_> + 4 1 1 4 -1. + <_> + 4 2 1 2 2. + 0 + <_> + + <_> + 4 1 4 2 -1. + <_> + 5 2 2 2 2. + 1 + <_> + + <_> + 4 1 6 1 -1. + <_> + 6 1 2 1 3. + 0 + <_> + + <_> + 4 1 2 24 -1. + <_> + 4 7 2 12 2. + 0 + <_> + + <_> + 4 1 5 2 -1. + <_> + 4 2 5 1 2. + 0 + <_> + + <_> + 4 1 7 6 -1. + <_> + 4 3 7 2 3. + 0 + <_> + + <_> + 4 2 3 2 -1. + <_> + 5 3 1 2 3. + 1 + <_> + + <_> + 4 2 6 11 -1. + <_> + 6 2 2 11 3. + 0 + <_> + + <_> + 4 2 5 6 -1. + <_> + 4 5 5 3 2. + 0 + <_> + + <_> + 4 2 7 4 -1. + <_> + 4 4 7 2 2. + 0 + <_> + + <_> + 4 3 6 6 -1. 
+ <_> + 6 5 2 2 9. + 0 + <_> + + <_> + 4 3 6 9 -1. + <_> + 6 6 2 3 9. + 0 + <_> + + <_> + 4 3 6 2 -1. + <_> + 4 4 6 1 2. + 0 + <_> + + <_> + 4 4 6 9 -1. + <_> + 6 7 2 3 9. + 0 + <_> + + <_> + 4 4 6 3 -1. + <_> + 4 5 6 1 3. + 0 + <_> + + <_> + 4 4 8 2 -1. + <_> + 4 5 8 1 2. + 0 + <_> + + <_> + 4 6 3 1 -1. + <_> + 5 6 1 1 3. + 0 + <_> + + <_> + 4 6 6 3 -1. + <_> + 6 6 2 3 3. + 0 + <_> + + <_> + 4 7 6 4 -1. + <_> + 6 7 2 4 3. + 0 + <_> + + <_> + 4 7 6 14 -1. + <_> + 6 7 2 14 3. + 0 + <_> + + <_> + 4 7 6 15 -1. + <_> + 6 7 2 15 3. + 0 + <_> + + <_> + 4 7 6 3 -1. + <_> + 7 7 3 3 2. + 0 + <_> + + <_> + 4 8 10 8 -1. + <_> + 4 10 10 4 2. + 0 + <_> + + <_> + 4 9 6 5 -1. + <_> + 6 9 2 5 3. + 0 + <_> + + <_> + 4 11 6 9 -1. + <_> + 6 14 2 3 9. + 0 + <_> + + <_> + 4 11 6 2 -1. + <_> + 4 12 6 1 2. + 0 + <_> + + <_> + 4 12 4 2 -1. + <_> + 5 13 2 2 2. + 1 + <_> + + <_> + 4 12 4 2 -1. + <_> + 4 12 2 1 2. + <_> + 6 13 2 1 2. + 0 + <_> + + <_> + 4 12 6 2 -1. + <_> + 4 13 6 1 2. + 0 + <_> + + <_> + 4 13 10 12 -1. + <_> + 4 13 5 6 2. + <_> + 9 19 5 6 2. + 0 + <_> + + <_> + 4 14 8 5 -1. + <_> + 6 14 4 5 2. + 0 + <_> + + <_> + 4 15 4 4 -1. + <_> + 5 15 2 4 2. + 0 + <_> + + <_> + 4 16 4 2 -1. + <_> + 5 16 2 2 2. + 0 + <_> + + <_> + 4 16 4 4 -1. + <_> + 5 16 2 4 2. + 0 + <_> + + <_> + 4 16 9 6 -1. + <_> + 7 18 3 2 9. + 0 + <_> + + <_> + 4 16 6 2 -1. + <_> + 4 16 6 1 2. + 1 + <_> + + <_> + 4 17 8 4 -1. + <_> + 6 17 4 4 2. + 0 + <_> + + <_> + 4 17 4 8 -1. + <_> + 6 17 2 8 2. + 0 + <_> + + <_> + 4 18 4 3 -1. + <_> + 3 19 4 1 3. + 1 + <_> + + <_> + 4 19 2 2 -1. + <_> + 5 19 1 2 2. + 0 + <_> + + <_> + 4 19 4 6 -1. + <_> + 4 19 2 3 2. + <_> + 6 22 2 3 2. + 0 + <_> + + <_> + 4 20 2 4 -1. + <_> + 4 20 1 2 2. + <_> + 5 22 1 2 2. + 0 + <_> + + <_> + 4 20 8 4 -1. + <_> + 4 22 8 2 2. + 0 + <_> + + <_> + 4 21 4 4 -1. + <_> + 5 21 2 4 2. + 0 + <_> + + <_> + 4 22 3 2 -1. + <_> + 5 22 1 2 3. + 0 + <_> + + <_> + 4 22 3 3 -1. + <_> + 5 22 1 3 3. + 0 + <_> + + <_> + 4 22 9 2 -1. + <_> + 4 23 9 1 2. 
+ 0 + <_> + + <_> + 4 22 11 3 -1. + <_> + 4 23 11 1 3. + 0 + <_> + + <_> + 4 23 4 2 -1. + <_> + 5 23 2 2 2. + 0 + <_> + + <_> + 4 24 4 1 -1. + <_> + 6 24 2 1 2. + 0 + <_> + + <_> + 5 0 1 4 -1. + <_> + 5 0 1 2 2. + 1 + <_> + + <_> + 5 0 3 3 -1. + <_> + 6 1 1 3 3. + 1 + <_> + + <_> + 5 0 3 4 -1. + <_> + 6 1 1 4 3. + 1 + <_> + + <_> + 5 0 4 4 -1. + <_> + 6 1 2 4 2. + 1 + <_> + + <_> + 5 0 6 5 -1. + <_> + 7 2 2 5 3. + 1 + <_> + + <_> + 5 0 3 8 -1. + <_> + 5 4 3 4 2. + 0 + <_> + + <_> + 5 0 3 24 -1. + <_> + 5 6 3 12 2. + 0 + <_> + + <_> + 5 0 8 1 -1. + <_> + 5 0 4 1 2. + 1 + <_> + + <_> + 5 0 4 24 -1. + <_> + 5 6 4 12 2. + 0 + <_> + + <_> + 5 0 4 20 -1. + <_> + 5 10 4 10 2. + 0 + <_> + + <_> + 5 0 5 8 -1. + <_> + 5 2 5 4 2. + 0 + <_> + + <_> + 5 0 8 4 -1. + <_> + 5 1 8 2 2. + 0 + <_> + + <_> + 5 0 8 20 -1. + <_> + 5 10 8 10 2. + 0 + <_> + + <_> + 5 1 3 2 -1. + <_> + 6 1 1 2 3. + 0 + <_> + + <_> + 5 1 6 20 -1. + <_> + 7 1 2 20 3. + 0 + <_> + + <_> + 5 1 9 9 -1. + <_> + 8 4 3 3 9. + 0 + <_> + + <_> + 5 1 10 3 -1. + <_> + 5 1 5 3 2. + 1 + <_> + + <_> + 5 2 9 4 -1. + <_> + 8 5 3 4 3. + 1 + <_> + + <_> + 5 3 2 2 -1. + <_> + 5 3 1 1 2. + <_> + 6 4 1 1 2. + 0 + <_> + + <_> + 5 3 4 3 -1. + <_> + 6 4 2 3 2. + 1 + <_> + + <_> + 5 3 2 9 -1. + <_> + 5 6 2 3 3. + 0 + <_> + + <_> + 5 3 5 4 -1. + <_> + 5 5 5 2 2. + 0 + <_> + + <_> + 5 3 6 2 -1. + <_> + 5 4 6 1 2. + 0 + <_> + + <_> + 5 3 7 4 -1. + <_> + 5 3 7 2 2. + 1 + <_> + + <_> + 5 4 3 1 -1. + <_> + 6 5 1 1 3. + 1 + <_> + + <_> + 5 4 2 9 -1. + <_> + 5 7 2 3 3. + 0 + <_> + + <_> + 5 4 6 9 -1. + <_> + 7 7 2 3 9. + 0 + <_> + + <_> + 5 4 8 16 -1. + <_> + 5 12 8 8 2. + 0 + <_> + + <_> + 5 5 2 6 -1. + <_> + 5 7 2 2 3. + 0 + <_> + + <_> + 5 5 4 8 -1. + <_> + 5 7 4 4 2. + 0 + <_> + + <_> + 5 5 8 20 -1. + <_> + 5 5 4 10 2. + <_> + 9 15 4 10 2. + 0 + <_> + + <_> + 5 6 10 14 -1. + <_> + 5 13 10 7 2. + 0 + <_> + + <_> + 5 8 3 2 -1. + <_> + 6 8 1 2 3. + 0 + <_> + + <_> + 5 8 6 9 -1. + <_> + 5 11 6 3 3. + 0 + <_> + + <_> + 5 9 5 16 -1. 
+ <_> + 5 13 5 8 2. + 0 + <_> + + <_> + 5 10 6 9 -1. + <_> + 7 13 2 3 9. + 0 + <_> + + <_> + 5 10 6 3 -1. + <_> + 5 10 3 3 2. + 1 + <_> + + <_> + 5 10 10 4 -1. + <_> + 4 11 10 2 2. + 1 + <_> + + <_> + 5 11 3 9 -1. + <_> + 6 14 1 3 9. + 0 + <_> + + <_> + 5 13 6 5 -1. + <_> + 7 13 2 5 3. + 0 + <_> + + <_> + 5 14 6 4 -1. + <_> + 5 14 6 2 2. + 1 + <_> + + <_> + 5 15 6 6 -1. + <_> + 5 15 3 3 2. + <_> + 8 18 3 3 2. + 0 + <_> + + <_> + 5 15 10 8 -1. + <_> + 5 15 5 4 2. + <_> + 10 19 5 4 2. + 0 + <_> + + <_> + 5 16 4 4 -1. + <_> + 6 16 2 4 2. + 0 + <_> + + <_> + 5 16 4 5 -1. + <_> + 6 16 2 5 2. + 0 + <_> + + <_> + 5 16 2 6 -1. + <_> + 5 19 2 3 2. + 0 + <_> + + <_> + 5 17 3 4 -1. + <_> + 4 18 3 2 2. + 1 + <_> + + <_> + 5 17 4 4 -1. + <_> + 4 18 4 2 2. + 1 + <_> + + <_> + 5 17 10 2 -1. + <_> + 10 17 5 2 2. + 0 + <_> + + <_> + 5 18 2 2 -1. + <_> + 6 18 1 2 2. + 0 + <_> + + <_> + 5 18 5 3 -1. + <_> + 5 19 5 1 3. + 0 + <_> + + <_> + 5 19 2 2 -1. + <_> + 6 19 1 2 2. + 0 + <_> + + <_> + 6 0 3 3 -1. + <_> + 7 0 1 3 3. + 0 + <_> + + <_> + 6 0 6 2 -1. + <_> + 8 0 2 2 3. + 0 + <_> + + <_> + 6 0 6 10 -1. + <_> + 6 0 3 5 2. + <_> + 9 5 3 5 2. + 0 + <_> + + <_> + 6 0 8 2 -1. + <_> + 6 1 8 1 2. + 0 + <_> + + <_> + 6 0 9 9 -1. + <_> + 6 3 9 3 3. + 0 + <_> + + <_> + 6 0 9 22 -1. + <_> + 6 11 9 11 2. + 0 + <_> + + <_> + 6 0 9 24 -1. + <_> + 6 12 9 12 2. + 0 + <_> + + <_> + 6 1 3 15 -1. + <_> + 7 6 1 5 9. + 0 + <_> + + <_> + 6 1 4 5 -1. + <_> + 7 2 2 5 2. + 1 + <_> + + <_> + 6 1 2 8 -1. + <_> + 6 5 2 4 2. + 0 + <_> + + <_> + 6 1 9 12 -1. + <_> + 9 5 3 4 9. + 0 + <_> + + <_> + 6 1 8 4 -1. + <_> + 6 1 4 2 2. + <_> + 10 3 4 2 2. + 0 + <_> + + <_> + 6 1 6 4 -1. + <_> + 6 3 6 2 2. + 0 + <_> + + <_> + 6 1 9 2 -1. + <_> + 6 2 9 1 2. + 0 + <_> + + <_> + 6 2 3 18 -1. + <_> + 6 11 3 9 2. + 0 + <_> + + <_> + 6 2 4 4 -1. + <_> + 6 4 4 2 2. + 0 + <_> + + <_> + 6 2 5 3 -1. + <_> + 5 3 5 1 3. + 1 + <_> + + <_> + 6 2 5 6 -1. + <_> + 4 4 5 2 3. + 1 + <_> + + <_> + 6 3 3 12 -1. + <_> + 7 7 1 4 9. 
+ 0 + <_> + + <_> + 6 3 2 4 -1. + <_> + 6 5 2 2 2. + 0 + <_> + + <_> + 6 3 3 4 -1. + <_> + 6 5 3 2 2. + 0 + <_> + + <_> + 6 3 6 16 -1. + <_> + 6 11 6 8 2. + 0 + <_> + + <_> + 6 4 3 3 -1. + <_> + 7 5 1 1 9. + 0 + <_> + + <_> + 6 4 2 1 -1. + <_> + 6 4 1 1 2. + 1 + <_> + + <_> + 6 4 2 2 -1. + <_> + 6 4 1 2 2. + 1 + <_> + + <_> + 6 4 9 21 -1. + <_> + 9 11 3 7 9. + 0 + <_> + + <_> + 6 4 9 6 -1. + <_> + 4 6 9 2 3. + 1 + <_> + + <_> + 6 5 3 9 -1. + <_> + 7 8 1 3 9. + 0 + <_> + + <_> + 6 5 9 3 -1. + <_> + 9 5 3 3 3. + 0 + <_> + + <_> + 6 5 6 16 -1. + <_> + 6 5 3 8 2. + <_> + 9 13 3 8 2. + 0 + <_> + + <_> + 6 5 3 18 -1. + <_> + 6 14 3 9 2. + 0 + <_> + + <_> + 6 5 8 14 -1. + <_> + 6 12 8 7 2. + 0 + <_> + + <_> + 6 5 9 6 -1. + <_> + 4 7 9 2 3. + 1 + <_> + + <_> + 6 6 2 18 -1. + <_> + 6 15 2 9 2. + 0 + <_> + + <_> + 6 7 2 14 -1. + <_> + 6 14 2 7 2. + 0 + <_> + + <_> + 6 7 6 10 -1. + <_> + 6 7 3 5 2. + <_> + 9 12 3 5 2. + 0 + <_> + + <_> + 6 7 6 14 -1. + <_> + 6 7 3 7 2. + <_> + 9 14 3 7 2. + 0 + <_> + + <_> + 6 7 6 17 -1. + <_> + 9 7 3 17 2. + 0 + <_> + + <_> + 6 7 4 15 -1. + <_> + 6 12 4 5 3. + 0 + <_> + + <_> + 6 7 8 18 -1. + <_> + 6 7 4 9 2. + <_> + 10 16 4 9 2. + 0 + <_> + + <_> + 6 7 5 4 -1. + <_> + 5 8 5 2 2. + 1 + <_> + + <_> + 6 7 8 12 -1. + <_> + 6 11 8 4 3. + 0 + <_> + + <_> + 6 8 2 12 -1. + <_> + 6 12 2 4 3. + 0 + <_> + + <_> + 6 8 4 16 -1. + <_> + 6 12 4 8 2. + 0 + <_> + + <_> + 6 8 9 4 -1. + <_> + 5 9 9 2 2. + 1 + <_> + + <_> + 6 9 2 4 -1. + <_> + 6 11 2 2 2. + 0 + <_> + + <_> + 6 9 9 15 -1. + <_> + 9 14 3 5 9. + 0 + <_> + + <_> + 6 9 8 8 -1. + <_> + 6 9 4 4 2. + <_> + 10 13 4 4 2. + 0 + <_> + + <_> + 6 10 2 6 -1. + <_> + 6 13 2 3 2. + 0 + <_> + + <_> + 6 11 3 9 -1. + <_> + 7 14 1 3 9. + 0 + <_> + + <_> + 6 11 8 11 -1. + <_> + 8 11 4 11 2. + 0 + <_> + + <_> + 6 12 3 6 -1. + <_> + 7 14 1 2 9. + 0 + <_> + + <_> + 6 13 3 6 -1. + <_> + 7 15 1 2 9. + 0 + <_> + + <_> + 6 13 3 12 -1. + <_> + 6 19 3 6 2. + 0 + <_> + + <_> + 6 13 8 12 -1. + <_> + 6 13 4 6 2. 
+ <_> + 10 19 4 6 2. + 0 + <_> + + <_> + 6 14 3 2 -1. + <_> + 7 15 1 2 3. + 1 + <_> + + <_> + 6 14 9 1 -1. + <_> + 9 14 3 1 3. + 0 + <_> + + <_> + 6 14 8 1 -1. + <_> + 6 14 4 1 2. + 1 + <_> + + <_> + 6 14 5 6 -1. + <_> + 4 16 5 2 3. + 1 + <_> + + <_> + 6 15 6 6 -1. + <_> + 6 15 3 3 2. + <_> + 9 18 3 3 2. + 0 + <_> + + <_> + 6 15 6 4 -1. + <_> + 6 15 6 2 2. + 1 + <_> + + <_> + 6 16 4 5 -1. + <_> + 7 16 2 5 2. + 0 + <_> + + <_> + 6 16 8 4 -1. + <_> + 8 16 4 4 2. + 0 + <_> + + <_> + 6 17 2 4 -1. + <_> + 6 18 2 2 2. + 0 + <_> + + <_> + 6 17 2 8 -1. + <_> + 6 19 2 4 2. + 0 + <_> + + <_> + 6 18 4 2 -1. + <_> + 6 18 4 1 2. + 1 + <_> + + <_> + 6 18 5 6 -1. + <_> + 6 21 5 3 2. + 0 + <_> + + <_> + 6 20 7 4 -1. + <_> + 6 22 7 2 2. + 0 + <_> + + <_> + 6 23 8 2 -1. + <_> + 8 23 4 2 2. + 0 + <_> + + <_> + 7 0 1 8 -1. + <_> + 7 2 1 4 2. + 0 + <_> + + <_> + 7 0 3 3 -1. + <_> + 8 0 1 3 3. + 0 + <_> + + <_> + 7 0 4 5 -1. + <_> + 8 1 2 5 2. + 1 + <_> + + <_> + 7 0 4 2 -1. + <_> + 9 0 2 2 2. + 0 + <_> + + <_> + 7 0 8 6 -1. + <_> + 11 0 4 6 2. + 0 + <_> + + <_> + 7 0 8 4 -1. + <_> + 7 2 8 2 2. + 0 + <_> + + <_> + 7 1 6 12 -1. + <_> + 9 5 2 4 9. + 0 + <_> + + <_> + 7 1 6 15 -1. + <_> + 9 6 2 5 9. + 0 + <_> + + <_> + 7 1 3 4 -1. + <_> + 7 3 3 2 2. + 0 + <_> + + <_> + 7 1 8 4 -1. + <_> + 7 1 4 2 2. + <_> + 11 3 4 2 2. + 0 + <_> + + <_> + 7 1 8 24 -1. + <_> + 7 13 8 12 2. + 0 + <_> + + <_> + 7 2 6 8 -1. + <_> + 7 2 3 4 2. + <_> + 10 6 3 4 2. + 0 + <_> + + <_> + 7 3 4 4 -1. + <_> + 8 4 2 4 2. + 1 + <_> + + <_> + 7 3 6 3 -1. + <_> + 9 4 2 1 9. + 0 + <_> + + <_> + 7 3 6 4 -1. + <_> + 9 5 2 4 3. + 1 + <_> + + <_> + 7 3 5 16 -1. + <_> + 7 7 5 8 2. + 0 + <_> + + <_> + 7 4 1 21 -1. + <_> + 7 11 1 7 3. + 0 + <_> + + <_> + 7 4 6 4 -1. + <_> + 9 6 2 4 3. + 1 + <_> + + <_> + 7 4 8 1 -1. + <_> + 11 4 4 1 2. + 0 + <_> + + <_> + 7 4 7 16 -1. + <_> + 7 8 7 8 2. + 0 + <_> + + <_> + 7 5 2 8 -1. + <_> + 7 7 2 4 2. + 0 + <_> + + <_> + 7 5 6 5 -1. + <_> + 9 5 2 5 3. + 0 + <_> + + <_> + 7 5 6 14 -1. 
+ <_> + 7 5 3 7 2. + <_> + 10 12 3 7 2. + 0 + <_> + + <_> + 7 5 6 18 -1. + <_> + 7 14 6 9 2. + 0 + <_> + + <_> + 7 6 4 1 -1. + <_> + 8 6 2 1 2. + 0 + <_> + + <_> + 7 6 8 1 -1. + <_> + 9 6 4 1 2. + 0 + <_> + + <_> + 7 6 2 16 -1. + <_> + 7 10 2 8 2. + 0 + <_> + + <_> + 7 6 8 6 -1. + <_> + 5 8 8 2 3. + 1 + <_> + + <_> + 7 7 8 1 -1. + <_> + 9 7 4 1 2. + 0 + <_> + + <_> + 7 7 2 8 -1. + <_> + 7 11 2 4 2. + 0 + <_> + + <_> + 7 7 8 2 -1. + <_> + 7 8 8 1 2. + 0 + <_> + + <_> + 7 8 6 9 -1. + <_> + 9 11 2 3 9. + 0 + <_> + + <_> + 7 8 8 6 -1. + <_> + 7 8 4 3 2. + <_> + 11 11 4 3 2. + 0 + <_> + + <_> + 7 9 8 6 -1. + <_> + 9 11 4 6 2. + 1 + <_> + + <_> + 7 10 6 2 -1. + <_> + 7 10 3 2 2. + 1 + <_> + + <_> + 7 10 8 3 -1. + <_> + 11 10 4 3 2. + 0 + <_> + + <_> + 7 10 8 9 -1. + <_> + 7 13 8 3 3. + 0 + <_> + + <_> + 7 11 2 2 -1. + <_> + 7 12 2 1 2. + 0 + <_> + + <_> + 7 11 6 12 -1. + <_> + 9 15 2 4 9. + 0 + <_> + + <_> + 7 11 8 2 -1. + <_> + 11 11 4 2 2. + 0 + <_> + + <_> + 7 11 8 9 -1. + <_> + 7 14 8 3 3. + 0 + <_> + + <_> + 7 12 8 4 -1. + <_> + 9 12 4 4 2. + 0 + <_> + + <_> + 7 12 8 1 -1. + <_> + 11 12 4 1 2. + 0 + <_> + + <_> + 7 12 6 6 -1. + <_> + 7 12 6 3 2. + 1 + <_> + + <_> + 7 12 7 3 -1. + <_> + 6 13 7 1 3. + 1 + <_> + + <_> + 7 13 1 8 -1. + <_> + 7 17 1 4 2. + 0 + <_> + + <_> + 7 13 6 3 -1. + <_> + 9 14 2 1 9. + 0 + <_> + + <_> + 7 13 8 9 -1. + <_> + 9 13 4 9 2. + 0 + <_> + + <_> + 7 13 6 12 -1. + <_> + 7 13 3 6 2. + <_> + 10 19 3 6 2. + 0 + <_> + + <_> + 7 13 5 3 -1. + <_> + 6 14 5 1 3. + 1 + <_> + + <_> + 7 13 6 3 -1. + <_> + 6 14 6 1 3. + 1 + <_> + + <_> + 7 14 2 4 -1. + <_> + 8 14 1 4 2. + 0 + <_> + + <_> + 7 14 8 1 -1. + <_> + 9 14 4 1 2. + 0 + <_> + + <_> + 7 14 8 4 -1. + <_> + 9 14 4 4 2. + 0 + <_> + + <_> + 7 14 8 6 -1. + <_> + 9 14 4 6 2. + 0 + <_> + + <_> + 7 14 6 5 -1. + <_> + 7 14 3 5 2. + 1 + <_> + + <_> + 7 15 2 6 -1. + <_> + 5 17 2 2 3. + 1 + <_> + + <_> + 7 15 6 8 -1. + <_> + 7 15 3 4 2. + <_> + 10 19 3 4 2. + 0 + <_> + + <_> + 7 15 6 10 -1. 
+ <_> + 7 15 3 5 2. + <_> + 10 20 3 5 2. + 0 + <_> + + <_> + 7 15 4 6 -1. + <_> + 5 17 4 2 3. + 1 + <_> + + <_> + 7 16 3 6 -1. + <_> + 7 19 3 3 2. + 0 + <_> + + <_> + 7 16 3 8 -1. + <_> + 7 20 3 4 2. + 0 + <_> + + <_> + 7 16 5 4 -1. + <_> + 6 17 5 2 2. + 1 + <_> + + <_> + 7 17 6 6 -1. + <_> + 7 17 3 3 2. + <_> + 10 20 3 3 2. + 0 + <_> + + <_> + 7 17 8 8 -1. + <_> + 11 17 4 8 2. + 0 + <_> + + <_> + 7 18 2 1 -1. + <_> + 7 18 1 1 2. + 1 + <_> + + <_> + 7 18 8 3 -1. + <_> + 9 18 4 3 2. + 0 + <_> + + <_> + 7 19 2 1 -1. + <_> + 7 19 1 1 2. + 1 + <_> + + <_> + 7 19 8 1 -1. + <_> + 9 19 4 1 2. + 0 + <_> + + <_> + 7 19 8 6 -1. + <_> + 7 19 4 3 2. + <_> + 11 22 4 3 2. + 0 + <_> + + <_> + 7 21 6 2 -1. + <_> + 7 22 6 1 2. + 0 + <_> + + <_> + 7 21 7 4 -1. + <_> + 7 23 7 2 2. + 0 + <_> + + <_> + 7 22 8 3 -1. + <_> + 7 23 8 1 3. + 0 + <_> + + <_> + 7 23 4 1 -1. + <_> + 9 23 2 1 2. + 0 + <_> + + <_> + 7 23 6 2 -1. + <_> + 9 23 2 2 3. + 0 + <_> + + <_> + 8 0 4 3 -1. + <_> + 9 0 2 3 2. + 0 + <_> + + <_> + 8 0 3 4 -1. + <_> + 9 0 1 4 3. + 0 + <_> + + <_> + 8 0 6 1 -1. + <_> + 11 0 3 1 2. + 0 + <_> + + <_> + 8 0 7 2 -1. + <_> + 8 1 7 1 2. + 0 + <_> + + <_> + 8 1 4 1 -1. + <_> + 9 1 2 1 2. + 0 + <_> + + <_> + 8 1 1 6 -1. + <_> + 8 3 1 2 3. + 0 + <_> + + <_> + 8 1 6 6 -1. + <_> + 10 3 2 6 3. + 1 + <_> + + <_> + 8 1 4 20 -1. + <_> + 8 1 2 10 2. + <_> + 10 11 2 10 2. + 0 + <_> + + <_> + 8 2 5 16 -1. + <_> + 8 10 5 8 2. + 0 + <_> + + <_> + 8 3 3 1 -1. + <_> + 9 3 1 1 3. + 0 + <_> + + <_> + 8 3 6 3 -1. + <_> + 10 4 2 1 9. + 0 + <_> + + <_> + 8 3 4 4 -1. + <_> + 8 3 2 2 2. + <_> + 10 5 2 2 2. + 0 + <_> + + <_> + 8 4 4 5 -1. + <_> + 9 5 2 5 2. + 1 + <_> + + <_> + 8 4 2 2 -1. + <_> + 8 5 2 1 2. + 0 + <_> + + <_> + 8 4 4 4 -1. + <_> + 8 4 2 2 2. + <_> + 10 6 2 2 2. + 0 + <_> + + <_> + 8 4 6 4 -1. + <_> + 10 6 2 4 3. + 1 + <_> + + <_> + 8 5 1 6 -1. + <_> + 8 5 1 3 2. + 1 + <_> + + <_> + 8 5 4 6 -1. + <_> + 8 5 4 3 2. + 1 + <_> + + <_> + 8 6 3 8 -1. + <_> + 6 8 3 4 2. 
+ 1 + <_> + + <_> + 8 7 3 1 -1. + <_> + 9 7 1 1 3. + 0 + <_> + + <_> + 8 7 3 12 -1. + <_> + 9 11 1 4 9. + 0 + <_> + + <_> + 8 7 2 8 -1. + <_> + 8 11 2 4 2. + 0 + <_> + + <_> + 8 7 6 18 -1. + <_> + 10 13 2 6 9. + 0 + <_> + + <_> + 8 10 1 2 -1. + <_> + 8 10 1 1 2. + 1 + <_> + + <_> + 8 10 4 1 -1. + <_> + 10 10 2 1 2. + 0 + <_> + + <_> + 8 11 7 8 -1. + <_> + 8 13 7 4 2. + 0 + <_> + + <_> + 8 13 4 2 -1. + <_> + 10 13 2 2 2. + 0 + <_> + + <_> + 8 14 5 3 -1. + <_> + 7 15 5 1 3. + 1 + <_> + + <_> + 8 15 4 1 -1. + <_> + 9 15 2 1 2. + 0 + <_> + + <_> + 8 15 2 2 -1. + <_> + 9 15 1 2 2. + 0 + <_> + + <_> + 8 15 4 2 -1. + <_> + 9 16 2 2 2. + 1 + <_> + + <_> + 8 15 4 10 -1. + <_> + 8 15 2 5 2. + <_> + 10 20 2 5 2. + 0 + <_> + + <_> + 8 17 4 1 -1. + <_> + 9 18 2 1 2. + 1 + <_> + + <_> + 8 17 4 6 -1. + <_> + 8 17 2 3 2. + <_> + 10 20 2 3 2. + 0 + <_> + + <_> + 8 17 7 8 -1. + <_> + 8 21 7 4 2. + 0 + <_> + + <_> + 8 18 4 4 -1. + <_> + 8 18 2 2 2. + <_> + 10 20 2 2 2. + 0 + <_> + + <_> + 8 18 6 6 -1. + <_> + 10 20 2 2 9. + 0 + <_> + + <_> + 8 19 7 6 -1. + <_> + 8 22 7 3 2. + 0 + <_> + + <_> + 8 20 4 1 -1. + <_> + 10 20 2 1 2. + 0 + <_> + + <_> + 8 21 7 4 -1. + <_> + 8 23 7 2 2. + 0 + <_> + + <_> + 8 22 3 3 -1. + <_> + 9 22 1 3 3. + 0 + <_> + + <_> + 8 23 6 2 -1. + <_> + 11 23 3 2 2. + 0 + <_> + + <_> + 8 24 6 1 -1. + <_> + 11 24 3 1 2. + 0 + <_> + + <_> + 9 0 4 2 -1. + <_> + 10 0 2 2 2. + 0 + <_> + + <_> + 9 0 4 6 -1. + <_> + 11 0 2 6 2. + 0 + <_> + + <_> + 9 0 6 6 -1. + <_> + 9 0 3 3 2. + <_> + 12 3 3 3 2. + 0 + <_> + + <_> + 9 0 4 4 -1. + <_> + 9 1 4 2 2. + 0 + <_> + + <_> + 9 0 6 2 -1. + <_> + 9 0 6 1 2. + 1 + <_> + + <_> + 9 1 3 1 -1. + <_> + 10 1 1 1 3. + 0 + <_> + + <_> + 9 1 3 5 -1. + <_> + 10 2 1 5 3. + 1 + <_> + + <_> + 9 1 2 3 -1. + <_> + 8 2 2 1 3. + 1 + <_> + + <_> + 9 1 6 6 -1. + <_> + 9 1 3 3 2. + <_> + 12 4 3 3 2. + 0 + <_> + + <_> + 9 2 5 3 -1. + <_> + 8 3 5 1 3. + 1 + <_> + + <_> + 9 3 6 5 -1. + <_> + 9 3 3 5 2. + 1 + <_> + + <_> + 9 4 4 5 -1. + <_> + 10 5 2 5 2. 
+ 1 + <_> + + <_> + 9 5 4 14 -1. + <_> + 9 5 2 7 2. + <_> + 11 12 2 7 2. + 0 + <_> + + <_> + 9 6 2 2 -1. + <_> + 10 6 1 2 2. + 0 + <_> + + <_> + 9 7 2 1 -1. + <_> + 10 7 1 1 2. + 0 + <_> + + <_> + 9 10 2 2 -1. + <_> + 10 10 1 2 2. + 0 + <_> + + <_> + 9 12 3 1 -1. + <_> + 10 13 1 1 3. + 1 + <_> + + <_> + 9 13 3 1 -1. + <_> + 10 14 1 1 3. + 1 + <_> + + <_> + 9 13 6 6 -1. + <_> + 9 15 6 2 3. + 0 + <_> + + <_> + 9 15 5 8 -1. + <_> + 9 19 5 4 2. + 0 + <_> + + <_> + 9 16 3 1 -1. + <_> + 10 17 1 1 3. + 1 + <_> + + <_> + 9 16 4 6 -1. + <_> + 9 16 2 3 2. + <_> + 11 19 2 3 2. + 0 + <_> + + <_> + 9 16 3 3 -1. + <_> + 8 17 3 1 3. + 1 + <_> + + <_> + 9 17 1 4 -1. + <_> + 8 18 1 2 2. + 1 + <_> + + <_> + 9 17 3 4 -1. + <_> + 10 18 1 4 3. + 1 + <_> + + <_> + 9 17 4 6 -1. + <_> + 9 17 2 3 2. + <_> + 11 20 2 3 2. + 0 + <_> + + <_> + 9 17 6 8 -1. + <_> + 9 17 3 4 2. + <_> + 12 21 3 4 2. + 0 + <_> + + <_> + 9 18 2 1 -1. + <_> + 9 18 1 1 2. + 1 + <_> + + <_> + 9 18 3 4 -1. + <_> + 10 19 1 4 3. + 1 + <_> + + <_> + 9 18 6 7 -1. + <_> + 12 18 3 7 2. + 0 + <_> + + <_> + 9 19 3 6 -1. + <_> + 10 19 1 6 3. + 0 + <_> + + <_> + 9 23 6 2 -1. + <_> + 11 23 2 2 3. + 0 + <_> + + <_> + 9 23 6 1 -1. + <_> + 12 23 3 1 2. + 0 + <_> + + <_> + 9 24 6 1 -1. + <_> + 12 24 3 1 2. + 0 + <_> + + <_> + 10 0 4 2 -1. + <_> + 11 0 2 2 2. + 0 + <_> + + <_> + 10 0 3 3 -1. + <_> + 11 0 1 3 3. + 0 + <_> + + <_> + 10 0 2 6 -1. + <_> + 10 3 2 3 2. + 0 + <_> + + <_> + 10 0 4 6 -1. + <_> + 10 0 2 3 2. + <_> + 12 3 2 3 2. + 0 + <_> + + <_> + 10 0 4 24 -1. + <_> + 12 0 2 24 2. + 0 + <_> + + <_> + 10 0 3 3 -1. + <_> + 9 1 3 1 3. + 1 + <_> + + <_> + 10 0 4 3 -1. + <_> + 9 1 4 1 3. + 1 + <_> + + <_> + 10 0 5 2 -1. + <_> + 10 1 5 1 2. + 0 + <_> + + <_> + 10 0 5 2 -1. + <_> + 10 0 5 1 2. + 1 + <_> + + <_> + 10 0 5 3 -1. + <_> + 9 1 5 1 3. + 1 + <_> + + <_> + 10 0 5 18 -1. + <_> + 10 9 5 9 2. + 0 + <_> + + <_> + 10 1 1 15 -1. + <_> + 10 6 1 5 3. + 0 + <_> + + <_> + 10 2 4 21 -1. + <_> + 12 2 2 21 2. 
+ 0 + <_> + + <_> + 10 3 4 19 -1. + <_> + 12 3 2 19 2. + 0 + <_> + + <_> + 10 4 4 2 -1. + <_> + 12 4 2 2 2. + 0 + <_> + + <_> + 10 5 1 20 -1. + <_> + 10 10 1 10 2. + 0 + <_> + + <_> + 10 6 2 3 -1. + <_> + 9 7 2 1 3. + 1 + <_> + + <_> + 10 6 2 14 -1. + <_> + 10 13 2 7 2. + 0 + <_> + + <_> + 10 6 4 12 -1. + <_> + 10 9 4 6 2. + 0 + <_> + + <_> + 10 7 5 8 -1. + <_> + 10 9 5 4 2. + 0 + <_> + + <_> + 10 12 1 2 -1. + <_> + 10 12 1 1 2. + 1 + <_> + + <_> + 10 12 4 8 -1. + <_> + 10 14 4 4 2. + 0 + <_> + + <_> + 10 16 1 6 -1. + <_> + 10 16 1 3 2. + 1 + <_> + + <_> + 10 16 3 3 -1. + <_> + 11 17 1 3 3. + 1 + <_> + + <_> + 10 17 2 3 -1. + <_> + 10 17 1 3 2. + 1 + <_> + + <_> + 10 19 3 2 -1. + <_> + 11 20 1 2 3. + 1 + <_> + + <_> + 10 20 4 4 -1. + <_> + 10 20 2 2 2. + <_> + 12 22 2 2 2. + 0 + <_> + + <_> + 10 21 5 4 -1. + <_> + 10 23 5 2 2. + 0 + <_> + + <_> + 10 22 5 3 -1. + <_> + 10 23 5 1 3. + 0 + <_> + + <_> + 10 24 4 1 -1. + <_> + 11 24 2 1 2. + 0 + <_> + + <_> + 11 0 3 5 -1. + <_> + 12 0 1 5 3. + 0 + <_> + + <_> + 11 0 3 7 -1. + <_> + 12 0 1 7 3. + 0 + <_> + + <_> + 11 0 2 23 -1. + <_> + 12 0 1 23 2. + 0 + <_> + + <_> + 11 0 4 1 -1. + <_> + 11 0 2 1 2. + 1 + <_> + + <_> + 11 0 3 3 -1. + <_> + 10 1 3 1 3. + 1 + <_> + + <_> + 11 0 3 4 -1. + <_> + 10 1 3 2 2. + 1 + <_> + + <_> + 11 0 4 3 -1. + <_> + 10 1 4 1 3. + 1 + <_> + + <_> + 11 0 4 4 -1. + <_> + 10 1 4 2 2. + 1 + <_> + + <_> + 11 0 4 6 -1. + <_> + 9 2 4 2 3. + 1 + <_> + + <_> + 11 1 3 4 -1. + <_> + 10 2 3 2 2. + 1 + <_> + + <_> + 11 1 4 2 -1. + <_> + 11 2 4 1 2. + 0 + <_> + + <_> + 11 2 4 20 -1. + <_> + 12 2 2 20 2. + 0 + <_> + + <_> + 11 3 3 4 -1. + <_> + 12 4 1 4 3. + 1 + <_> + + <_> + 11 9 4 8 -1. + <_> + 12 10 2 8 2. + 1 + <_> + + <_> + 11 9 2 4 -1. + <_> + 10 10 2 2 2. + 1 + <_> + + <_> + 11 9 4 8 -1. + <_> + 11 11 4 4 2. + 0 + <_> + + <_> + 11 10 4 15 -1. + <_> + 11 15 4 5 3. + 0 + <_> + + <_> + 11 11 1 8 -1. + <_> + 9 13 1 4 2. + 1 + <_> + + <_> + 11 11 4 3 -1. + <_> + 12 12 2 3 2. 
+ 1 + <_> + + <_> + 11 11 4 10 -1. + <_> + 12 12 2 10 2. + 1 + <_> + + <_> + 11 14 4 8 -1. + <_> + 12 14 2 8 2. + 0 + <_> + + <_> + 11 14 4 2 -1. + <_> + 11 15 4 1 2. + 0 + <_> + + <_> + 11 16 4 5 -1. + <_> + 12 17 2 5 2. + 1 + <_> + + <_> + 11 17 1 6 -1. + <_> + 11 17 1 3 2. + 1 + <_> + + <_> + 11 17 3 4 -1. + <_> + 12 18 1 4 3. + 1 + <_> + + <_> + 11 19 3 3 -1. + <_> + 12 20 1 3 3. + 1 + <_> + + <_> + 11 19 2 6 -1. + <_> + 12 19 1 6 2. + 0 + <_> + + <_> + 11 19 4 2 -1. + <_> + 11 19 2 1 2. + <_> + 13 20 2 1 2. + 0 + <_> + + <_> + 11 20 2 2 -1. + <_> + 12 20 1 2 2. + 0 + <_> + + <_> + 11 22 2 3 -1. + <_> + 11 23 2 1 3. + 0 + <_> + + <_> + 11 24 4 1 -1. + <_> + 12 24 2 1 2. + 0 + <_> + + <_> + 12 0 3 7 -1. + <_> + 13 0 1 7 3. + 0 + <_> + + <_> + 12 0 3 21 -1. + <_> + 13 0 1 21 3. + 0 + <_> + + <_> + 12 0 3 24 -1. + <_> + 13 0 1 24 3. + 0 + <_> + + <_> + 12 0 3 4 -1. + <_> + 12 2 3 2 2. + 0 + <_> + + <_> + 12 0 3 6 -1. + <_> + 10 2 3 2 3. + 1 + <_> + + <_> + 12 0 3 6 -1. + <_> + 12 3 3 3 2. + 0 + <_> + + <_> + 12 1 3 3 -1. + <_> + 13 2 1 1 9. + 0 + <_> + + <_> + 12 1 3 9 -1. + <_> + 13 4 1 3 9. + 0 + <_> + + <_> + 12 1 3 23 -1. + <_> + 13 1 1 23 3. + 0 + <_> + + <_> + 12 1 2 6 -1. + <_> + 10 3 2 2 3. + 1 + <_> + + <_> + 12 1 3 6 -1. + <_> + 10 3 3 2 3. + 1 + <_> + + <_> + 12 1 3 12 -1. + <_> + 9 4 3 6 2. + 1 + <_> + + <_> + 12 2 1 8 -1. + <_> + 10 4 1 4 2. + 1 + <_> + + <_> + 12 2 3 9 -1. + <_> + 13 5 1 3 9. + 0 + <_> + + <_> + 12 6 1 4 -1. + <_> + 11 7 1 2 2. + 1 + <_> + + <_> + 12 8 3 4 -1. + <_> + 12 9 3 2 2. + 0 + <_> + + <_> + 12 10 1 2 -1. + <_> + 12 10 1 1 2. + 1 + <_> + + <_> + 12 10 3 12 -1. + <_> + 13 14 1 4 9. + 0 + <_> + + <_> + 12 10 2 8 -1. + <_> + 12 10 1 8 2. + 1 + <_> + + <_> + 12 10 3 4 -1. + <_> + 11 11 3 2 2. + 1 + <_> + + <_> + 12 11 1 8 -1. + <_> + 10 13 1 4 2. + 1 + <_> + + <_> + 12 13 1 10 -1. + <_> + 12 13 1 5 2. + 1 + <_> + + <_> + 12 16 3 5 -1. + <_> + 13 16 1 5 3. + 0 + <_> + + <_> + 12 17 3 3 -1. + <_> + 13 18 1 3 3. 
+ 1 + <_> + + <_> + 12 17 3 5 -1. + <_> + 13 18 1 5 3. + 1 + <_> + + <_> + 12 18 3 4 -1. + <_> + 13 19 1 4 3. + 1 + <_> + + <_> + 12 19 3 2 -1. + <_> + 12 20 3 1 2. + 0 + <_> + + <_> + 12 19 3 6 -1. + <_> + 12 21 3 2 3. + 0 + <_> + + <_> + 13 0 1 14 -1. + <_> + 13 7 1 7 2. + 0 + <_> + + <_> + 13 0 2 21 -1. + <_> + 14 0 1 21 2. + 0 + <_> + + <_> + 13 0 2 8 -1. + <_> + 13 2 2 4 2. + 0 + <_> + + <_> + 13 0 2 8 -1. + <_> + 13 4 2 4 2. + 0 + <_> + + <_> + 13 0 2 24 -1. + <_> + 13 12 2 12 2. + 0 + <_> + + <_> + 13 1 1 2 -1. + <_> + 13 2 1 1 2. + 0 + <_> + + <_> + 13 1 1 3 -1. + <_> + 13 2 1 1 3. + 0 + <_> + + <_> + 13 1 1 4 -1. + <_> + 13 2 1 2 2. + 0 + <_> + + <_> + 13 1 2 1 -1. + <_> + 13 1 1 1 2. + 1 + <_> + + <_> + 13 1 1 4 -1. + <_> + 12 2 1 2 2. + 1 + <_> + + <_> + 13 1 1 8 -1. + <_> + 13 3 1 4 2. + 0 + <_> + + <_> + 13 1 2 17 -1. + <_> + 14 1 1 17 2. + 0 + <_> + + <_> + 13 1 2 23 -1. + <_> + 14 1 1 23 2. + 0 + <_> + + <_> + 13 1 2 9 -1. + <_> + 13 4 2 3 3. + 0 + <_> + + <_> + 13 2 1 9 -1. + <_> + 13 5 1 3 3. + 0 + <_> + + <_> + 13 2 1 20 -1. + <_> + 13 7 1 10 2. + 0 + <_> + + <_> + 13 2 1 14 -1. + <_> + 13 9 1 7 2. + 0 + <_> + + <_> + 13 2 2 22 -1. + <_> + 14 2 1 22 2. + 0 + <_> + + <_> + 13 3 1 3 -1. + <_> + 13 4 1 1 3. + 0 + <_> + + <_> + 13 3 1 4 -1. + <_> + 13 5 1 2 2. + 0 + <_> + + <_> + 13 3 1 8 -1. + <_> + 13 5 1 4 2. + 0 + <_> + + <_> + 13 3 1 20 -1. + <_> + 13 8 1 10 2. + 0 + <_> + + <_> + 13 3 1 18 -1. + <_> + 13 9 1 6 3. + 0 + <_> + + <_> + 13 4 1 4 -1. + <_> + 13 5 1 2 2. + 0 + <_> + + <_> + 13 4 1 10 -1. + <_> + 13 9 1 5 2. + 0 + <_> + + <_> + 13 4 1 18 -1. + <_> + 13 10 1 6 3. + 0 + <_> + + <_> + 13 4 1 12 -1. + <_> + 13 4 1 6 2. + 1 + <_> + + <_> + 13 4 2 3 -1. + <_> + 13 5 2 1 3. + 0 + <_> + + <_> + 13 4 2 4 -1. + <_> + 13 5 2 2 2. + 0 + <_> + + <_> + 13 5 1 6 -1. + <_> + 13 8 1 3 2. + 0 + <_> + + <_> + 13 5 1 16 -1. + <_> + 13 9 1 8 2. + 0 + <_> + + <_> + 13 6 1 12 -1. + <_> + 13 10 1 4 3. + 0 + <_> + + <_> + 13 7 1 6 -1. + <_> + 13 9 1 2 3. 
+ 0 + <_> + + <_> + 13 7 1 14 -1. + <_> + 13 14 1 7 2. + 0 + <_> + + <_> + 13 8 1 6 -1. + <_> + 13 10 1 2 3. + 0 + <_> + + <_> + 13 9 1 2 -1. + <_> + 13 10 1 1 2. + 0 + <_> + + <_> + 13 9 1 8 -1. + <_> + 13 11 1 4 2. + 0 + <_> + + <_> + 13 9 1 6 -1. + <_> + 11 11 1 2 3. + 1 + <_> + + <_> + 13 9 1 14 -1. + <_> + 13 16 1 7 2. + 0 + <_> + + <_> + 13 10 1 8 -1. + <_> + 13 12 1 4 2. + 0 + <_> + + <_> + 13 10 2 8 -1. + <_> + 13 10 1 8 2. + 1 + <_> + + <_> + 13 11 1 10 -1. + <_> + 13 16 1 5 2. + 0 + <_> + + <_> + 13 11 2 10 -1. + <_> + 13 16 2 5 2. + 0 + <_> + + <_> + 13 12 1 12 -1. + <_> + 13 12 1 6 2. + 1 + <_> + + <_> + 13 13 1 4 -1. + <_> + 13 14 1 2 2. + 0 + <_> + + <_> + 13 14 1 6 -1. + <_> + 13 17 1 3 2. + 0 + <_> + + <_> + 13 15 2 10 -1. + <_> + 13 20 2 5 2. + 0 + <_> + + <_> + 13 17 2 8 -1. + <_> + 13 21 2 4 2. + 0 + <_> + + <_> + 13 19 1 6 -1. + <_> + 13 22 1 3 2. + 0 + <_> + + <_> + 13 20 2 4 -1. + <_> + 13 22 2 2 2. + 0 + <_> + + <_> + 14 8 1 2 -1. + <_> + 14 9 1 1 2. + 0 + <_> + + <_> + 14 13 1 12 -1. + <_> + 14 19 1 6 2. + 0 + <_> + + <_> + 14 14 1 10 -1. + <_> + 14 14 1 5 2. + 1 + <_> + + <_> + 14 15 1 6 -1. + <_> + 14 17 1 2 3. 
+ 0 + diff --git a/ocr_test/rec_test/char/cifar10_quick_iter_2030578.solverstate b/ocr_test/rec_test/char/cifar10_quick_iter_2030578.solverstate new file mode 100644 index 0000000000000000000000000000000000000000..8bdbcc291d57b7916fe04ed6e2c0479b3dafe11f Binary files /dev/null and b/ocr_test/rec_test/char/cifar10_quick_iter_2030578.solverstate differ diff --git a/ocr_test/rec_test/char/cifar10_quick_train_test.prototxt b/ocr_test/rec_test/char/cifar10_quick_train_test.prototxt new file mode 100644 index 0000000000000000000000000000000000000000..8b06622ae24b30e0afe8fa5472686fc3e131e492 --- /dev/null +++ b/ocr_test/rec_test/char/cifar10_quick_train_test.prototxt @@ -0,0 +1,247 @@ +name: "CIFAR10_quick" +layer { + name: "cifar" + type: "Data" + top: "data" + top: "label" + include { + phase: TRAIN + } + transform_param { + scale: 0.00390625 + } + data_param { + source: "imagenet_train_leveldb" + batch_size: 1024 + backend: LEVELDB + } +} +layer { + name: "cifar" + type: "Data" + top: "data" + top: "label" + include { + phase: TEST + } + transform_param { + scale: 0.00390625 + } + data_param { + source: "imagenet_test_leveldb" + batch_size: 1024 + backend: LEVELDB + } +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 20 + pad: 2 + kernel_size: 5 + stride: 1 + weight_filler { + type: "gaussian" + std: 0.0001 + } + bias_filler { + type: "constant" + } + } +} + +layer { + name: "bn1" + type: "BatchNorm" + bottom: "conv1" + top: "bn1" + + batch_norm_param { + use_global_stats: false + } +} + +layer { + name: "conv1_scale" + type: "Scale" + bottom: "bn1" + top: "bn1" + scale_param { + bias_term: true + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "bn1" + top: "bn1" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "bn1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} + +layer { + name: "conv2" + type: 
"Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 50 + pad: 2 + kernel_size: 5 + stride: 1 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "bn2" + type: "BatchNorm" + bottom: "conv2" + top: "bn2" + batch_norm_param { + use_global_stats: false + } +} + +layer { + name: "conv2_scale" + type: "Scale" + bottom: "bn2" + top: "bn2" + scale_param { + bias_term: true + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "bn2" + top: "bn2" +} + +layer { + name: "pool2" + type: "Pooling" + bottom: "bn2" + top: "pool2" + pooling_param { + pool: AVE + kernel_size: 3 + stride: 2 + } +} +layer { + name: "ip1" + type: "InnerProduct" + bottom: "pool2" + top: "ip1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 500 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "bn3" + type: "BatchNorm" + bottom: "ip1" + top: "bn3" + + batch_norm_param { + use_global_stats: false + } +} +layer { + name: "ip1_scale" + type: "Scale" + bottom: "bn3" + top: "bn3" + scale_param { + bias_term: true + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "bn3" + top: "bn3" +} +layer { + name: "ip2" + type: "InnerProduct" + bottom: "bn3" + top: "ip2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 37 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "accuracy" + type: "Accuracy" + bottom: "ip2" + bottom: "label" + top: "accuracy" + include { + phase: TEST + } +} +layer { + name: "loss" + type: "SoftmaxWithLoss" + bottom: "ip2" + bottom: "label" + top: "loss" +} diff --git a/ocr_test/rec_test/char/model.caffemodel b/ocr_test/rec_test/char/model.caffemodel new file mode 100644 index 
0000000000000000000000000000000000000000..0afcd306645bb87ca5f4cc3bf8b7d1724fad5731 Binary files /dev/null and b/ocr_test/rec_test/char/model.caffemodel differ diff --git a/ocr_test/rec_test/char/model.prototxt b/ocr_test/rec_test/char/model.prototxt new file mode 100644 index 0000000000000000000000000000000000000000..1392387f52468a79cf6f905ed445bf5209e64b47 --- /dev/null +++ b/ocr_test/rec_test/char/model.prototxt @@ -0,0 +1,207 @@ +name: "CIFAR10_quick_test" +input: "data" +input_dim: 1 +input_dim: 1 +input_dim: 35 +input_dim: 21 +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 20 + pad: 2 + kernel_size: 5 + stride: 1 + weight_filler { + type: "gaussian" + std: 0.0001 + } + bias_filler { + type: "constant" + } + } +} + +layer { + name: "bn1" + type: "BatchNorm" + bottom: "conv1" + top: "bn1" + + batch_norm_param { + use_global_stats: true + } +} + +layer { + name: "conv1_scale" + type: "Scale" + bottom: "bn1" + top: "bn1" + scale_param { + bias_term: true + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "bn1" + top: "bn1" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "bn1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} + +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 50 + pad: 2 + kernel_size: 5 + stride: 1 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "bn2" + type: "BatchNorm" + bottom: "conv2" + top: "bn2" + batch_norm_param { + use_global_stats: true + } +} + +layer { + name: "conv2_scale" + type: "Scale" + bottom: "bn2" + top: "bn2" + scale_param { + bias_term: true + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "bn2" + top: "bn2" +} + +layer { + name: "pool2" + type: "Pooling" + bottom: 
"bn2" + top: "pool2" + pooling_param { + pool: AVE + kernel_size: 3 + stride: 2 + } +} +layer { + name: "ip1" + type: "InnerProduct" + bottom: "pool2" + top: "ip1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 500 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "bn3" + type: "BatchNorm" + bottom: "ip1" + top: "bn3" + + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "ip1_scale" + type: "Scale" + bottom: "bn3" + top: "bn3" + scale_param { + bias_term: true + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "bn3" + top: "bn3" +} +layer { + name: "ip2" + type: "InnerProduct" + bottom: "bn3" + top: "ip2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 37 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "prob" + type: "Softmax" + bottom: "ip2" + top: "prob" +} diff --git a/ocr_test/rec_test/chinese/cifar10_quick_iter_1009313.solverstate b/ocr_test/rec_test/chinese/cifar10_quick_iter_1009313.solverstate new file mode 100644 index 0000000000000000000000000000000000000000..01fc1fe34522f5544d6f04e0a79a0ead141286f9 Binary files /dev/null and b/ocr_test/rec_test/chinese/cifar10_quick_iter_1009313.solverstate differ diff --git a/ocr_test/rec_test/chinese/cifar10_quick_train_test.prototxt b/ocr_test/rec_test/chinese/cifar10_quick_train_test.prototxt new file mode 100644 index 0000000000000000000000000000000000000000..2922203815302b8ff90c732067b8a77aa1bcf888 --- /dev/null +++ b/ocr_test/rec_test/chinese/cifar10_quick_train_test.prototxt @@ -0,0 +1,247 @@ +name: "CIFAR10_quick" +layer { + name: "cifar" + type: "Data" + top: "data" + top: "label" + include { + phase: TRAIN + } + transform_param { + scale: 0.00390625 + } + data_param { + source: "imagenet_train_leveldb" + batch_size: 1024 + backend: LEVELDB + } +} +layer { + name: 
"cifar" + type: "Data" + top: "data" + top: "label" + include { + phase: TEST + } + transform_param { + scale: 0.00390625 + } + data_param { + source: "imagenet_test_leveldb" + batch_size: 1024 + backend: LEVELDB + } +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 20 + pad: 2 + kernel_size: 5 + stride: 1 + weight_filler { + type: "gaussian" + std: 0.0001 + } + bias_filler { + type: "constant" + } + } +} + +layer { + name: "bn1" + type: "BatchNorm" + bottom: "conv1" + top: "bn1" + + batch_norm_param { + use_global_stats: false + } +} + +layer { + name: "conv1_scale" + type: "Scale" + bottom: "bn1" + top: "bn1" + scale_param { + bias_term: true + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "bn1" + top: "bn1" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "bn1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} + +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 50 + pad: 2 + kernel_size: 5 + stride: 1 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "bn2" + type: "BatchNorm" + bottom: "conv2" + top: "bn2" + batch_norm_param { + use_global_stats: false + } +} + +layer { + name: "conv2_scale" + type: "Scale" + bottom: "bn2" + top: "bn2" + scale_param { + bias_term: true + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "bn2" + top: "bn2" +} + +layer { + name: "pool2" + type: "Pooling" + bottom: "bn2" + top: "pool2" + pooling_param { + pool: AVE + kernel_size: 3 + stride: 2 + } +} +layer { + name: "ip1" + type: "InnerProduct" + bottom: "pool2" + top: "ip1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 500 + weight_filler { + type: "gaussian" + std: 0.01 + } + 
bias_filler { + type: "constant" + } + } +} +layer { + name: "bn3" + type: "BatchNorm" + bottom: "ip1" + top: "bn3" + + batch_norm_param { + use_global_stats: false + } +} +layer { + name: "ip1_scale" + type: "Scale" + bottom: "bn3" + top: "bn3" + scale_param { + bias_term: true + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "bn3" + top: "bn3" +} +layer { + name: "ip2" + type: "InnerProduct" + bottom: "bn3" + top: "ip2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 31 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "accuracy" + type: "Accuracy" + bottom: "ip2" + bottom: "label" + top: "accuracy" + include { + phase: TEST + } +} +layer { + name: "loss" + type: "SoftmaxWithLoss" + bottom: "ip2" + bottom: "label" + top: "loss" +} diff --git a/ocr_test/rec_test/chinese/model.caffemodel b/ocr_test/rec_test/chinese/model.caffemodel new file mode 100644 index 0000000000000000000000000000000000000000..6708222d572523ebb16731e2600dac026280771b Binary files /dev/null and b/ocr_test/rec_test/chinese/model.caffemodel differ diff --git a/ocr_test/rec_test/chinese/model.prototxt b/ocr_test/rec_test/chinese/model.prototxt new file mode 100644 index 0000000000000000000000000000000000000000..78b80a16a01f8463dfae84212f6972fe69f6ff21 --- /dev/null +++ b/ocr_test/rec_test/chinese/model.prototxt @@ -0,0 +1,207 @@ +name: "CIFAR10_quick_test" +input: "data" +input_dim: 1 +input_dim: 1 +input_dim: 35 +input_dim: 21 +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 20 + pad: 2 + kernel_size: 5 + stride: 1 + weight_filler { + type: "gaussian" + std: 0.0001 + } + bias_filler { + type: "constant" + } + } +} + +layer { + name: "bn1" + type: "BatchNorm" + bottom: "conv1" + top: "bn1" + + batch_norm_param { + use_global_stats: true + } +} + +layer { + name: 
"conv1_scale" + type: "Scale" + bottom: "bn1" + top: "bn1" + scale_param { + bias_term: true + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "bn1" + top: "bn1" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "bn1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} + +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 50 + pad: 2 + kernel_size: 5 + stride: 1 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "bn2" + type: "BatchNorm" + bottom: "conv2" + top: "bn2" + batch_norm_param { + use_global_stats: true + } +} + +layer { + name: "conv2_scale" + type: "Scale" + bottom: "bn2" + top: "bn2" + scale_param { + bias_term: true + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "bn2" + top: "bn2" +} + +layer { + name: "pool2" + type: "Pooling" + bottom: "bn2" + top: "pool2" + pooling_param { + pool: AVE + kernel_size: 3 + stride: 2 + } +} +layer { + name: "ip1" + type: "InnerProduct" + bottom: "pool2" + top: "ip1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 500 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "bn3" + type: "BatchNorm" + bottom: "ip1" + top: "bn3" + + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "ip1_scale" + type: "Scale" + bottom: "bn3" + top: "bn3" + scale_param { + bias_term: true + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "bn3" + top: "bn3" +} +layer { + name: "ip2" + type: "InnerProduct" + bottom: "bn3" + top: "ip2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 31 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "prob" + type: "Softmax" + bottom: "ip2" + 
top: "prob" +} diff --git a/ocr_test/stdafx.cpp b/ocr_test/stdafx.cpp new file mode 100644 index 0000000000000000000000000000000000000000..a1008bcfef5c0101afa159668e1b3983a5533f1c --- /dev/null +++ b/ocr_test/stdafx.cpp @@ -0,0 +1,8 @@ +// stdafx.cpp : ֻ׼ļԴļ +// ocr_test.pch ΪԤͷ +// stdafx.obj ԤϢ + +#include "stdafx.h" + +// TODO: STDAFX.H κĸͷļ +//ڴļ diff --git a/ocr_test/stdafx.h b/ocr_test/stdafx.h new file mode 100644 index 0000000000000000000000000000000000000000..baa4bbc621f3abfbc1726da79a84e54dbd5309f8 --- /dev/null +++ b/ocr_test/stdafx.h @@ -0,0 +1,15 @@ +// stdafx.h : ׼ϵͳļİļ +// Ǿʹõĵ +// ضĿİļ +// + +#pragma once + +#include "targetver.h" + +#include +#include + + + +// TODO: ڴ˴óҪͷļ diff --git a/ocr_test/targetver.h b/ocr_test/targetver.h new file mode 100644 index 0000000000000000000000000000000000000000..416cebf89fd0df8216693b65bba536a1436754e7 --- /dev/null +++ b/ocr_test/targetver.h @@ -0,0 +1,8 @@ +#pragma once + +// SDKDDKVer.h õ߰汾 Windows ƽ̨ + +// ҪΪǰ Windows ƽ̨Ӧó WinSDKVer.h +// _WIN32_WINNT ΪҪֵ֧ƽ̨Ȼٰ SDKDDKVer.h + +#include diff --git a/ocr_test/wm_test/densenet-bigger-5x5-no-lstm_iter_1303038.caffemodel b/ocr_test/wm_test/densenet-bigger-5x5-no-lstm_iter_1303038.caffemodel new file mode 100644 index 0000000000000000000000000000000000000000..aa4704627999324b1b9d69d0a4c7cef94af4d049 Binary files /dev/null and b/ocr_test/wm_test/densenet-bigger-5x5-no-lstm_iter_1303038.caffemodel differ diff --git a/ocr_test/wm_test/densenet-bigger-5x5-no-lstm_iter_1303038.solverstate b/ocr_test/wm_test/densenet-bigger-5x5-no-lstm_iter_1303038.solverstate new file mode 100644 index 0000000000000000000000000000000000000000..22e1be888a564ca1ea4f9551312dbfd6e8ee4136 Binary files /dev/null and b/ocr_test/wm_test/densenet-bigger-5x5-no-lstm_iter_1303038.solverstate differ diff --git a/ocr_test/wm_test/densenet-bigger-5x5-no-lstm_iter_250043.caffemodel b/ocr_test/wm_test/densenet-bigger-5x5-no-lstm_iter_250043.caffemodel new file mode 100644 index 
0000000000000000000000000000000000000000..c80f4ffb5abfc4957d822299b95a9c890e7e89da Binary files /dev/null and b/ocr_test/wm_test/densenet-bigger-5x5-no-lstm_iter_250043.caffemodel differ diff --git a/ocr_test/wm_test/densenet-bigger-5x5-no-lstm_iter_250043.solverstate b/ocr_test/wm_test/densenet-bigger-5x5-no-lstm_iter_250043.solverstate new file mode 100644 index 0000000000000000000000000000000000000000..665191f760fedd7dc7c75cb0024d649d3c71f03e Binary files /dev/null and b/ocr_test/wm_test/densenet-bigger-5x5-no-lstm_iter_250043.solverstate differ diff --git a/ocr_test/wm_test/densenet-sum-blstm-full-res-blstm_train-val.prototxt b/ocr_test/wm_test/densenet-sum-blstm-full-res-blstm_train-val.prototxt new file mode 100644 index 0000000000000000000000000000000000000000..ff3b9f1a19f244ec7c8404fa01e4ea87e725e8ce --- /dev/null +++ b/ocr_test/wm_test/densenet-sum-blstm-full-res-blstm_train-val.prototxt @@ -0,0 +1,574 @@ + + +layer { + name: "data" + type: "ImageData" + #type: "Data" + top: "data" + top: "label" + include { + phase: TRAIN + } + transform_param { + mean_value:152 + mean_value:152 + mean_value:152 + } + image_data_param { + #data_param { + source: "C:\\WM_LSTM\\1410450762.txt" + new_height: 32 + new_width: 280 + is_color: 1 + root_folder: "C:\\WM_LSTM\\train_data1\\" + batch_size: 96 + #rand_skip: 1968 + shuffle: true + } +} + +layer { + name: "data" + type: "ImageData" + #type: "Data" + top: "data" + top: "label" + include { + phase: TEST + } + transform_param { + mean_value:152 + mean_value:152 + mean_value:152 + } + image_data_param { + #data_param { + source: "C:\\WM_LSTM\\1410450762_test.txt" + new_height: 32 + new_width: 280 + is_color: 1 + root_folder: "C:\\WM_LSTM\\train_data1\\" + batch_size: 96 + shuffle: true + #rand_skip: 1968 + } +} + +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + param { + lr_mult: 1 + decay_mult: 1 + } + convolution_param { + num_output: 64 + kernel_size: 5 + pad: 2 + stride: 2 + 
weight_filler { type: "msra"} + bias_filler { type: "constant" value: 0 } + } +} + + +# DenseBlock 1 +layer { + name: "DenseBlock1" + type: "DenseBlock" + bottom: "conv1" + top: "DenseBlock1" + denseblock_param { + numTransition: 8 + initChannel: 64 + growthRate: 8 + Filter_Filler { + type: "msra" + } + BN_Scaler_Filler { + type: "constant" + value: 1 + } + BN_Bias_Filler { + type: "constant" + value: 0 + } + use_dropout: false + dropout_amount: 0.2 + } +} +layer { + name: "BatchNorm1" + type: "BatchNorm" + bottom: "DenseBlock1" + top: "BatchNorm1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } +} +layer { + name: "Scale1" + type: "Scale" + bottom: "BatchNorm1" + top: "BatchNorm1" + scale_param { + filler { + value: 1 + } + bias_term: true + bias_filler { + value: 0 + } + } +} +layer { + name: "ReLU1" + type: "ReLU" + bottom: "BatchNorm1" + top: "BatchNorm1" +} +layer { + name: "Convolution2" + type: "Convolution" + bottom: "BatchNorm1" + top: "Convolution2" + convolution_param { + num_output: 128 + bias_term: false + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "msra" + } + } +} +layer { + name: "Dropout1" + type: "Dropout" + bottom: "Convolution2" + top: "Dropout1" + dropout_param { + dropout_ratio: 0.2 + } +} +layer { + name: "Pooling1" + type: "Pooling" + bottom: "Dropout1" + top: "Pooling1" + pooling_param { + pool: AVE + kernel_size: 2 + stride: 2 + } +} + +# DenseBlock 2 +layer { + name: "DenseBlock2" + type: "DenseBlock" + bottom: "Pooling1" + top: "DenseBlock2" + denseblock_param { + numTransition: 8 + initChannel: 64 + growthRate: 8 + Filter_Filler { + type: "msra" + } + BN_Scaler_Filler { + type: "constant" + value: 1 + } + BN_Bias_Filler { + type: "constant" + value: 0 + } + use_dropout: false + dropout_amount: 0.2 + } +} +layer { + name: "BatchNorm2" + type: "BatchNorm" + bottom: "DenseBlock2" + top: "BatchNorm2" + param { + lr_mult: 0 + decay_mult: 0 + } 
+ param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } +} +layer { + name: "Scale2" + type: "Scale" + bottom: "BatchNorm2" + top: "BatchNorm2" + scale_param { + filler { + value: 1 + } + bias_term: true + bias_filler { + value: 0 + } + } +} +layer { + name: "ReLU2" + type: "ReLU" + bottom: "BatchNorm2" + top: "BatchNorm2" +} + + + +layer { + name: "Convolution3" + type: "Convolution" + bottom: "BatchNorm2" + top: "Convolution3" + convolution_param { + num_output: 192 + bias_term: false + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "msra" + } + } +} +layer { + name: "Dropout2" + type: "Dropout" + bottom: "Convolution3" + top: "Convolution3" + dropout_param { + dropout_ratio: 0.2 + } +} +layer { + name: "Pooling2" + type: "Pooling" + bottom: "Convolution3" + top: "Pooling2" + pooling_param { + pool: AVE + kernel_size: 2 + stride: 2 + } +} + +# DenseBlock 3 +layer { + name: "DenseBlock3" + type: "DenseBlock" + bottom: "Pooling2" + top: "DenseBlock3" + denseblock_param { + numTransition: 8 + initChannel: 64 + growthRate: 8 + Filter_Filler { + type: "msra" + } + BN_Scaler_Filler { + type: "constant" + value: 1 + } + BN_Bias_Filler { + type: "constant" + value: 0 + } + use_dropout: false + dropout_amount: 0.2 + } +} +layer { + name: "BatchNorm3" + type: "BatchNorm" + bottom: "DenseBlock3" + top: "BatchNorm3" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } +} +layer { + name: "Scale3" + type: "Scale" + bottom: "BatchNorm3" + top: "BatchNorm3" + scale_param { + filler { + value: 1 + } + bias_term: true + bias_filler { + value: 0 + } + } +} +layer { + name: "ReLU3" + type: "ReLU" + bottom: "BatchNorm3" + top: "BatchNorm3" +} + + +layer { + name: "pool5_ave" + type: "Pooling" + bottom: "BatchNorm3" + top: "pool5_ave" + pooling_param { + pool: AVE + kernel_w: 1 + kernel_h: 4 + stride_w: 1 + stride_h: 1 + } +} + +layer { + name: 
"pool5_ave_transpose" + top: "pool5_ave_transpose" + bottom: "pool5_ave" + type: "Transpose" + transpose_param { + dim: 3 + dim: 2 + dim: 0 + dim: 1 + } +} + +layer { + name: "blstm_input" + type: "Reshape" + bottom: "pool5_ave_transpose" + top: "blstm_input" + reshape_param { + shape { dim: -1 } + axis: 1 + num_axes: 2 + } +} + +#===================blstm layer 1============================ +#======lstm1=================== +layer { + name: "lstm1" + type: "Lstm" + bottom: "blstm_input" + top: "lstm1" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} + +# =====lstm1_reverse=================== +layer { + name: "lstm1-reverse1" + type: "Reverse" + bottom: "blstm_input" + top: "rlstm1_input" + reverse_param { + axis: 0 + } +} +layer { + name: "rlstm1" + type: "Lstm" + bottom: "rlstm1_input" + top: "rlstm1-output" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "lstm1-reverse2" + type: "Reverse" + bottom: "rlstm1-output" + top: "rlstm1" + reverse_param { + axis: 0 + } +} + + +# merge lstm1 and rlstm1 +layer { + name: "blstm1" + type: "Eltwise" + bottom: "lstm1" + bottom: "rlstm1" + bottom: "blstm_input" + top: "blstm1" + eltwise_param { + operation: SUM + } +} + + + + +#===================blstm layer 2============================ +#======lstm2=================== +layer { + name: "lstm2" + type: "Lstm" + bottom: "blstm1" + top: "lstm2" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} + +# =====lstm2_reverse=================== +layer { + name: "lstm2-reverse1" + type: "Reverse" + bottom: "blstm1" + top: "rlstm2_input" + reverse_param { + axis: 0 + } +} + +layer { + name: "rlstm2" + type: "Lstm" + bottom: "rlstm2_input" + top: "rlstm2-output" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + 
std: 0.1 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "lstm2-reverse2" + type: "Reverse" + bottom: "rlstm2-output" + top: "rlstm2" + reverse_param { + axis: 0 + } +} + +# merge lstm2 and rlstm2 +layer { + name: "blstm2" + type: "Eltwise" + bottom: "lstm2" + bottom: "rlstm2" + bottom: "blstm1" + bottom: "blstm_input" + top: "blstm2" + eltwise_param { + operation: SUM + } +} + +layer { + name: "fc1x" + type: "InnerProduct" + bottom: "blstm2" + top: "fc1x" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + axis: 2 + num_output: 21 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} + + +layer { + name: "ctcloss" + type: "WarpCTCLoss" + bottom: "fc1x" + bottom: "label" + top: "ctcloss" + loss_weight:1 +} + +layer { + name: "acc" + type: "CTCGreedyDecoder" + bottom: "fc1x" + bottom: "label" + top: "acc" + include { + phase: TEST + } +} \ No newline at end of file diff --git a/ocr_test/wm_test/deploy.prototxt b/ocr_test/wm_test/deploy.prototxt new file mode 100644 index 0000000000000000000000000000000000000000..64ebb627666059701ccd93945c2e1e9c4bcc8198 --- /dev/null +++ b/ocr_test/wm_test/deploy.prototxt @@ -0,0 +1,514 @@ +name: "densenet" + +input: "data" +input_dim: 1 +input_dim: 3 +input_dim: 32 +input_dim: 280 + + +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + param { + lr_mult: 1 + decay_mult: 1 + } + convolution_param { + num_output: 64 + kernel_size: 5 + pad: 2 + stride: 2 + weight_filler { type: "msra"} + bias_filler { type: "constant" value: 0 } + } +} + + +# DenseBlock 1 +layer { + name: "DenseBlock1" + type: "DenseBlock" + bottom: "conv1" + top: "DenseBlock1" + denseblock_param { + numTransition: 8 + initChannel: 64 + growthRate: 8 + Filter_Filler { + type: "msra" + } + BN_Scaler_Filler { + type: "constant" + value: 1 + } + BN_Bias_Filler { + type: "constant" + value: 0 + } + use_dropout: false + 
dropout_amount: 0.2 + } +} +layer { + name: "BatchNorm1" + type: "BatchNorm" + bottom: "DenseBlock1" + top: "BatchNorm1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } +} +layer { + name: "Scale1" + type: "Scale" + bottom: "BatchNorm1" + top: "BatchNorm1" + scale_param { + filler { + value: 1 + } + bias_term: true + bias_filler { + value: 0 + } + } +} +layer { + name: "ReLU1" + type: "ReLU" + bottom: "BatchNorm1" + top: "BatchNorm1" +} +layer { + name: "Convolution2" + type: "Convolution" + bottom: "BatchNorm1" + top: "Convolution2" + convolution_param { + num_output: 128 + bias_term: false + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "msra" + } + } +} +layer { + name: "Dropout1" + type: "Dropout" + bottom: "Convolution2" + top: "Dropout1" + dropout_param { + dropout_ratio: 0.2 + } +} +layer { + name: "Pooling1" + type: "Pooling" + bottom: "Dropout1" + top: "Pooling1" + pooling_param { + pool: AVE + kernel_size: 2 + stride: 2 + } +} + +# DenseBlock 2 +layer { + name: "DenseBlock2" + type: "DenseBlock" + bottom: "Pooling1" + top: "DenseBlock2" + denseblock_param { + numTransition: 8 + initChannel: 64 + growthRate: 8 + Filter_Filler { + type: "msra" + } + BN_Scaler_Filler { + type: "constant" + value: 1 + } + BN_Bias_Filler { + type: "constant" + value: 0 + } + use_dropout: false + dropout_amount: 0.2 + } +} +layer { + name: "BatchNorm2" + type: "BatchNorm" + bottom: "DenseBlock2" + top: "BatchNorm2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } +} +layer { + name: "Scale2" + type: "Scale" + bottom: "BatchNorm2" + top: "BatchNorm2" + scale_param { + filler { + value: 1 + } + bias_term: true + bias_filler { + value: 0 + } + } +} +layer { + name: "ReLU2" + type: "ReLU" + bottom: "BatchNorm2" + top: "BatchNorm2" +} + + + +layer { + name: "Convolution3" + type: "Convolution" + bottom: 
"BatchNorm2" + top: "Convolution3" + convolution_param { + num_output: 192 + bias_term: false + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "msra" + } + } +} +layer { + name: "Dropout2" + type: "Dropout" + bottom: "Convolution3" + top: "Convolution3" + dropout_param { + dropout_ratio: 0.2 + } +} +layer { + name: "Pooling2" + type: "Pooling" + bottom: "Convolution3" + top: "Pooling2" + pooling_param { + pool: AVE + kernel_size: 2 + stride: 2 + } +} + +# DenseBlock 3 +layer { + name: "DenseBlock3" + type: "DenseBlock" + bottom: "Pooling2" + top: "DenseBlock3" + denseblock_param { + numTransition: 8 + initChannel: 64 + growthRate: 8 + Filter_Filler { + type: "msra" + } + BN_Scaler_Filler { + type: "constant" + value: 1 + } + BN_Bias_Filler { + type: "constant" + value: 0 + } + use_dropout: false + dropout_amount: 0.2 + } +} +layer { + name: "BatchNorm3" + type: "BatchNorm" + bottom: "DenseBlock3" + top: "BatchNorm3" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } +} +layer { + name: "Scale3" + type: "Scale" + bottom: "BatchNorm3" + top: "BatchNorm3" + scale_param { + filler { + value: 1 + } + bias_term: true + bias_filler { + value: 0 + } + } +} +layer { + name: "ReLU3" + type: "ReLU" + bottom: "BatchNorm3" + top: "BatchNorm3" +} + + +layer { + name: "pool5_ave" + type: "Pooling" + bottom: "BatchNorm3" + top: "pool5_ave" + pooling_param { + pool: AVE + kernel_w: 1 + kernel_h: 4 + stride_w: 1 + stride_h: 1 + } +} + +layer { + name: "pool5_ave_transpose" + top: "pool5_ave_transpose" + bottom: "pool5_ave" + type: "Transpose" + transpose_param { + dim: 3 + dim: 2 + dim: 0 + dim: 1 + } +} + +layer { + name: "blstm_input" + type: "Reshape" + bottom: "pool5_ave_transpose" + top: "blstm_input" + reshape_param { + shape { dim: -1 } + axis: 1 + num_axes: 2 + } +} + +#===================blstm layer 1============================ +#======lstm1=================== +layer { + name: 
"lstm1" + type: "Lstm" + bottom: "blstm_input" + top: "lstm1" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} + +# =====lstm1_reverse=================== +layer { + name: "lstm1-reverse1" + type: "Reverse" + bottom: "blstm_input" + top: "rlstm1_input" + reverse_param { + axis: 0 + } +} +layer { + name: "rlstm1" + type: "Lstm" + bottom: "rlstm1_input" + top: "rlstm1-output" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "lstm1-reverse2" + type: "Reverse" + bottom: "rlstm1-output" + top: "rlstm1" + reverse_param { + axis: 0 + } +} + + +# merge lstm1 and rlstm1 +layer { + name: "blstm1" + type: "Eltwise" + bottom: "lstm1" + bottom: "rlstm1" + bottom: "blstm_input" + top: "blstm1" + eltwise_param { + operation: SUM + } +} + + + + +#===================blstm layer 2============================ +#======lstm2=================== +layer { + name: "lstm2" + type: "Lstm" + bottom: "blstm1" + top: "lstm2" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} + +# =====lstm2_reverse=================== +layer { + name: "lstm2-reverse1" + type: "Reverse" + bottom: "blstm1" + top: "rlstm2_input" + reverse_param { + axis: 0 + } +} + +layer { + name: "rlstm2" + type: "Lstm" + bottom: "rlstm2_input" + top: "rlstm2-output" + lstm_param { + num_output: 256 + weight_filler { + type: "gaussian" + std: 0.1 + } + bias_filler { + type: "constant" + } + } +} +layer { + name: "lstm2-reverse2" + type: "Reverse" + bottom: "rlstm2-output" + top: "rlstm2" + reverse_param { + axis: 0 + } +} + +# merge lstm2 and rlstm2 +layer { + name: "blstm2" + type: "Eltwise" + bottom: "lstm2" + bottom: "rlstm2" + bottom: "blstm1" + bottom: "blstm_input" + top: "blstm2" + eltwise_param { + operation: SUM + } +} + +layer { + name: "fc1x" + type: 
"InnerProduct" + bottom: "blstm2" + top: "fc1x" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + axis: 2 + num_output: 21 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} + + +layer { + name: "result" + type: "CTCGreedyDecoder" + bottom: "fc1x" + top: "result" +} \ No newline at end of file diff --git a/ocr_test/wm_test/label.txt b/ocr_test/wm_test/label.txt new file mode 100644 index 0000000000000000000000000000000000000000..1e9cc8d98a175858fa8e69bcf5019e90eb4e34ea --- /dev/null +++ b/ocr_test/wm_test/label.txt @@ -0,0 +1,21 @@ +blank +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 \ No newline at end of file diff --git a/ocr_test/wm_test/mean_values.txt b/ocr_test/wm_test/mean_values.txt new file mode 100644 index 0000000000000000000000000000000000000000..f2c94c6579fc063bea87dec00bad89680fcf6473 --- /dev/null +++ b/ocr_test/wm_test/mean_values.txt @@ -0,0 +1,3 @@ +152 +152 +152 \ No newline at end of file diff --git a/ocr_test/wm_test/model.caffemodel b/ocr_test/wm_test/model.caffemodel new file mode 100644 index 0000000000000000000000000000000000000000..aa4704627999324b1b9d69d0a4c7cef94af4d049 Binary files /dev/null and b/ocr_test/wm_test/model.caffemodel differ diff --git a/ocr_test/x64/Release/bktree.obj b/ocr_test/x64/Release/bktree.obj new file mode 100644 index 0000000000000000000000000000000000000000..2547ad03e825db81427c388c153bba98d503dea1 Binary files /dev/null and b/ocr_test/x64/Release/bktree.obj differ diff --git a/ocr_test/x64/Release/ctcpp_entrypoint.obj b/ocr_test/x64/Release/ctcpp_entrypoint.obj new file mode 100644 index 0000000000000000000000000000000000000000..83f6c0427d280fd1a9644ea449cd3419cf78512e Binary files /dev/null and b/ocr_test/x64/Release/ctcpp_entrypoint.obj differ diff --git a/ocr_test/x64/Release/levenshtein.obj b/ocr_test/x64/Release/levenshtein.obj new file mode 100644 index 
0000000000000000000000000000000000000000..ca17392b8769b1b5777dbaff4943904fa083dbde Binary files /dev/null and b/ocr_test/x64/Release/levenshtein.obj differ diff --git a/ocr_test/x64/Release/ocr_test.Build.CppClean.log b/ocr_test/x64/Release/ocr_test.Build.CppClean.log new file mode 100644 index 0000000000000000000000000000000000000000..afcb0c1848fd56b858aa609803f2ff8ce456f058 --- /dev/null +++ b/ocr_test/x64/Release/ocr_test.Build.CppClean.log @@ -0,0 +1,21 @@ +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\caffe-vsproj\ocr_test\x64\release\ocr_test.pch +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\caffe-vsproj\ocr_test\x64\release\vc140.pdb +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\caffe-vsproj\ocr_test\x64\release\stdafx.obj +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\caffe-vsproj\ocr_test\x64\release\levenshtein.obj +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\caffe-vsproj\ocr_test\x64\release\bktree.obj +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\caffe-vsproj\ocr_test\x64\release\ctcpp_entrypoint.obj +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\caffe-vsproj\ocr_test\x64\release\ocr_test.obj +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\tools_bin\ocr_test.exe +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\tools_bin\ocr_test.ipdb +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\tools_bin\ocr_test.iobj +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\tools_bin\ocr_test.pdb +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\tools_bin\ocr_test.lib +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\tools_bin\ocr_test.exp +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\caffe-vsproj\ocr_test\..\..\tools_bin\ocr_test.exe 
+d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\caffe-vsproj\ocr_test\x64\release\ocr_test.tlog\cl.command.1.tlog +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\caffe-vsproj\ocr_test\x64\release\ocr_test.tlog\cl.read.1.tlog +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\caffe-vsproj\ocr_test\x64\release\ocr_test.tlog\cl.write.1.tlog +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\caffe-vsproj\ocr_test\x64\release\ocr_test.tlog\link.command.1.tlog +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\caffe-vsproj\ocr_test\x64\release\ocr_test.tlog\link.read.1.tlog +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\caffe-vsproj\ocr_test\x64\release\ocr_test.tlog\link.write.1.tlog +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn5.1_cuda8.0_vs2014\caffe-vsproj\ocr_test\x64\release\ocr_test.tlog\ocr_test.write.1u.tlog diff --git a/ocr_test/x64/Release/ocr_test.log b/ocr_test/x64/Release/ocr_test.log new file mode 100644 index 0000000000000000000000000000000000000000..0509119567ebe2ece2e5630bba79a1292d5a7bec --- /dev/null +++ b/ocr_test/x64/Release/ocr_test.log @@ -0,0 +1,275 @@ + ocr_test.cpp +ocr_test.cpp(207): warning C4267: “初始化”: 从“size_t”转换到“int”,可能丢失数据 +ocr_test.cpp(208): warning C4244: “初始化”: 从“float”转换到“int”,可能丢失数据 +ocr_test.cpp(229): warning C4267: “初始化”: 从“size_t”转换到“int”,可能丢失数据 +ocr_test.cpp(310): warning C4267: “参数”: 从“size_t”转换到“unsigned char”,可能丢失数据 +ocr_test.cpp(315): warning C4267: “参数”: 从“size_t”转换到“unsigned char”,可能丢失数据 +ocr_test.cpp(324): warning C4267: “参数”: 从“size_t”转换到“unsigned char”,可能丢失数据 +ocr_test.cpp(376): warning C4267: “参数”: 从“size_t”转换到“unsigned char”,可能丢失数据 +ocr_test.cpp(385): warning C4267: “参数”: 从“size_t”转换到“int”,可能丢失数据 +ocr_test.cpp(370): warning C4477: “printf”: 格式字符串“%d”需要类型“int”的参数,但可变参数 1 拥有了类型“std::size_t” + ocr_test.cpp(370): note: 请考虑在格式字符串中使用“%zd” +ocr_test.cpp(370): warning C4477: “printf”: 格式字符串“%d”需要类型“int”的参数,但可变参数 2 拥有了类型“unsigned 
__int64” + ocr_test.cpp(370): note: 请考虑在格式字符串中使用“%zd” +ocr_test.cpp(477): warning C4477: “printf”: 格式字符串“%d”需要类型“int”的参数,但可变参数 1 拥有了类型“std::size_t” + ocr_test.cpp(477): note: 请考虑在格式字符串中使用“%zd” +ocr_test.cpp(477): warning C4477: “printf”: 格式字符串“%d”需要类型“int”的参数,但可变参数 2 拥有了类型“unsigned __int64” + ocr_test.cpp(477): note: 请考虑在格式字符串中使用“%zd” + Unknown compiler version - please run the configure tests and report the results +D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\include\caffe/util/io.hpp(49): warning C4244: “参数”: 从“uint64_t”转换到“int”,可能丢失数据 +D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\caffe-gpu.h(68): warning C4251: “MTCNN::PNet_”: class“boost::shared_ptr>”需要有 dll 接口由 class“MTCNN”的客户端使用 + D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\caffe-gpu.h(58): note: 参见“boost::shared_ptr>”的声明 +D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\caffe-gpu.h(69): warning C4251: “MTCNN::RNet_”: class“boost::shared_ptr>”需要有 dll 接口由 class“MTCNN”的客户端使用 + D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\caffe-gpu.h(58): note: 参见“boost::shared_ptr>”的声明 +D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\caffe-gpu.h(70): warning C4251: “MTCNN::ONet_”: class“boost::shared_ptr>”需要有 dll 接口由 class“MTCNN”的客户端使用 + D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\caffe-gpu.h(58): note: 参见“boost::shared_ptr>”的声明 +D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\caffe-gpu.h(73): warning C4251: “MTCNN::condidate_rects_”: class“std::vector>”需要有 dll 接口由 class“MTCNN”的客户端使用 + with + [ + _Ty=FaceInfo + ] + 
D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\caffe-gpu.h(46): note: 参见“std::vector>”的声明 + with + [ + _Ty=FaceInfo + ] +D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\caffe-gpu.h(74): warning C4251: “MTCNN::total_boxes_”: class“std::vector>”需要有 dll 接口由 class“MTCNN”的客户端使用 + with + [ + _Ty=FaceInfo + ] + D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\caffe-gpu.h(46): note: 参见“std::vector>”的声明 + with + [ + _Ty=FaceInfo + ] +D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\caffe-gpu.h(75): warning C4251: “MTCNN::regressed_rects_”: class“std::vector>”需要有 dll 接口由 class“MTCNN”的客户端使用 + with + [ + _Ty=FaceInfo + ] + D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\caffe-gpu.h(46): note: 参见“std::vector>”的声明 + with + [ + _Ty=FaceInfo + ] +D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\caffe-gpu.h(76): warning C4251: “MTCNN::regressed_pading_”: class“std::vector>”需要有 dll 接口由 class“MTCNN”的客户端使用 + with + [ + _Ty=FaceInfo + ] + D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\caffe-gpu.h(46): note: 参见“std::vector>”的声明 + with + [ + _Ty=FaceInfo + ] +D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\caffe-gpu.h(78): warning C4251: “MTCNN::crop_img_”: class“std::vector>”需要有 dll 接口由 class“MTCNN”的客户端使用 + with + [ + _Ty=cv::Mat + ] + D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\opensource\opencv\include\opencv2/core/mat.hpp(183): note: 参见“std::vector>”的声明 + with + [ + _Ty=cv::Mat + ] +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(3): warning 
C4005: “_CRT_SECURE_NO_WARNINGS”: 宏重定义 + d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(3): note: 命令行参数: 参见前面的“_CRT_SECURE_NO_WARNINGS”定义 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(228): warning C4244: “初始化”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(232): warning C4267: “初始化”: 从“size_t”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(261): warning C4267: “初始化”: 从“size_t”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(272): warning C4244: “初始化”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(273): warning C4244: “=”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(293): warning C4267: “=”: 从“size_t”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(303): warning C4244: “初始化”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(304): warning C4244: “=”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(367): warning C4267: “初始化”: 从“size_t”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(385): warning C4267: “初始化”: 从“size_t”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(387): warning C4267: “初始化”: 从“size_t”转换到“int”,可能丢失数据 
+d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(405): warning C4244: “初始化”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(406): warning C4244: “=”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(424): warning C4267: “=”: 从“size_t”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(440): warning C4244: “初始化”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(441): warning C4244: “=”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(461): warning C4267: “=”: 从“size_t”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(481): warning C4244: “初始化”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(482): warning C4244: “=”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(500): warning C4267: “=”: 从“size_t”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(512): warning C4244: “初始化”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(513): warning C4244: “=”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(561): warning C4267: “初始化”: 从“size_t”转换到“int”,可能丢失数据 
+d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(595): warning C4267: “=”: 从“size_t”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(596): warning C4267: “=”: 从“size_t”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(604): warning C4244: “=”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(607): warning C4244: “=”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(680): warning C4244: “初始化”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(714): warning C4244: “初始化”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(746): warning C4267: “初始化”: 从“size_t”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(752): warning C4244: “=”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(772): warning C4244: “=”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(835): warning C4267: “初始化”: 从“size_t”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(841): warning C4244: “=”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(854): warning C4244: “=”: 从“double”转换到“int”,可能丢失数据 
+d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(869): warning C4244: “=”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(882): warning C4244: “=”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(1106): warning C4267: “初始化”: 从“size_t”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(1748): warning C4244: “参数”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(1749): warning C4244: “参数”: 从“double”转换到“int”,可能丢失数据 +d:\ocr\plate_card_blstm\caffe_ocr-master_cudn7_cuda9.2_vs2014_wm_platecard\caffe-vsproj\ocr_test\common.h(1769): warning C4244: “return”: 从“__int64”转换到“double”,可能丢失数据 +D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\classifierCaffe.h(41): warning C4251: “Classifier::input_geometry_”: class“cv::Size_”需要有 dll 接口由 class“Classifier”的客户端使用 + D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\opensource\opencv\include\opencv2/core/types.hpp(337): note: 参见“cv::Size_”的声明 +D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\classifierCaffe.h(53): warning C4251: “Classifier::net_”: class“std::shared_ptr>”需要有 dll 接口由 class“Classifier”的客户端使用 + D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\classifierCaffe.h(53): note: 参见“std::shared_ptr>”的声明 +D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\classifierCaffe.h(56): warning C4251: “Classifier::mean_”: class“cv::Mat”需要有 dll 接口由 class“Classifier”的客户端使用 + 
D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\include\caffe/common.hpp(73): note: 参见“cv::Mat”的声明 +D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\classifierCaffe.h(57): warning C4251: “Classifier::labels_”: class“std::vector>,std::allocator>>>”需要有 dll 接口由 class“Classifier”的客户端使用 + with + [ + _Ty=std::string + ] + D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\libClassification\classifierCaffe.h(57): note: 参见“std::vector>,std::allocator>>>”的声明 + with + [ + _Ty=std::string + ] +ocr_test.cpp(704): warning C4267: “初始化”: 从“size_t”转换到“int”,可能丢失数据 +ocr_test.cpp(814): warning C4244: “参数”: 从“float”转换到“int”,可能丢失数据 +ocr_test.cpp(843): warning C4244: “参数”: 从“int”转换到“float”,可能丢失数据 +ocr_test.cpp(844): warning C4244: “参数”: 从“int”转换到“float”,可能丢失数据 +ocr_test.cpp(845): warning C4244: “参数”: 从“int”转换到“float”,可能丢失数据 +ocr_test.cpp(846): warning C4244: “参数”: 从“int”转换到“float”,可能丢失数据 +ocr_test.cpp(910): warning C4244: “参数”: 从“float”转换到“int”,可能丢失数据 +ocr_test.cpp(913): warning C4244: “参数”: 从“float”转换到“int”,可能丢失数据 +ocr_test.cpp(995): warning C4267: “初始化”: 从“size_t”转换到“int”,可能丢失数据 +ocr_test.cpp(1126): warning C4244: “参数”: 从“float”转换到“int”,可能丢失数据 +ocr_test.cpp(1155): warning C4244: “参数”: 从“int”转换到“float”,可能丢失数据 +ocr_test.cpp(1156): warning C4244: “参数”: 从“int”转换到“float”,可能丢失数据 +ocr_test.cpp(1157): warning C4244: “参数”: 从“int”转换到“float”,可能丢失数据 +ocr_test.cpp(1158): warning C4244: “参数”: 从“int”转换到“float”,可能丢失数据 +ocr_test.cpp(1251): warning C4244: “参数”: 从“float”转换到“int”,可能丢失数据 +ocr_test.cpp(1254): warning C4244: “参数”: 从“float”转换到“int”,可能丢失数据 +ocr_test.cpp(1395): warning C4267: “初始化”: 从“size_t”转换到“int”,可能丢失数据 +ocr_test.cpp(1528): warning C4244: “参数”: 从“float”转换到“int”,可能丢失数据 +ocr_test.cpp(1557): warning C4244: “参数”: 从“int”转换到“float”,可能丢失数据 +ocr_test.cpp(1558): warning C4244: “参数”: 从“int”转换到“float”,可能丢失数据 +ocr_test.cpp(1559): warning C4244: “参数”: 从“int”转换到“float”,可能丢失数据 
+ocr_test.cpp(1560): warning C4244: “参数”: 从“int”转换到“float”,可能丢失数据 +ocr_test.cpp(1620): warning C4244: “参数”: 从“float”转换到“int”,可能丢失数据 +ocr_test.cpp(1623): warning C4244: “参数”: 从“float”转换到“int”,可能丢失数据 +ocr_test.cpp(1636): warning C4244: “参数”: 从“float”转换到“int”,可能丢失数据 +ocr_test.cpp(2093): warning C4244: “参数”: 从“double”转换到“int”,可能丢失数据 +ocr_test.cpp(1659): warning C4477: “printf”: 格式字符串“%d”需要类型“int”的参数,但可变参数 1 拥有了类型“unsigned __int64” + ocr_test.cpp(1659): note: 请考虑在格式字符串中使用“%zd” +ocr_test.cpp(1699): warning C4477: “printf”: 格式字符串“%d”需要类型“int”的参数,但可变参数 1 拥有了类型“unsigned __int64” + ocr_test.cpp(1699): note: 请考虑在格式字符串中使用“%zd” +ocr_test.cpp(1702): warning C4477: “printf”: 格式字符串“%d”需要类型“int”的参数,但可变参数 1 拥有了类型“unsigned __int64” + ocr_test.cpp(1702): note: 请考虑在格式字符串中使用“%zd” +ocr_test.cpp(1750): warning C4101: “predict6”: 未引用的局部变量 +ocr_test.cpp(1754): warning C4101: “loss6”: 未引用的局部变量 +C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\include\utility(200): warning C4267: “初始化”: 从“size_t”转换到“int”,可能丢失数据 + C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\include\xmemory0(737): note: 参见对正在编译的函数 模板 实例化“std::pair::pair(std::pair &&) noexcept”的引用 + with + [ + _Kty=wchar_t, + _Ty=int + ] + C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\include\xmemory0(737): note: 参见对正在编译的函数 模板 实例化“std::pair::pair(std::pair &&) noexcept”的引用 + with + [ + _Kty=wchar_t, + _Ty=int + ] + C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\include\xmemory0(857): note: 参见对正在编译的函数 模板 实例化“void std::allocator<_Other>::construct<_Objty,_Ty>(_Objty *,_Ty &&)”的引用 + with + [ + _Other=std::_Tree_node,void *>, + _Objty=std::pair, + _Ty=std::pair + ] + C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\include\xmemory0(857): note: 参见对正在编译的函数 模板 实例化“void std::allocator<_Other>::construct<_Objty,_Ty>(_Objty *,_Ty &&)”的引用 + with + [ + _Other=std::_Tree_node,void *>, + _Objty=std::pair, + _Ty=std::pair + ] + C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\include\xmemory0(996): note: 参见对正在编译的函数 
模板 实例化“void std::allocator_traits<_Alloc>::construct<_Ty,std::pair>(std::allocator<_Other> &,_Objty *,std::pair &&)”的引用 + with + [ + _Alloc=std::allocator,void *>>, + _Ty=std::pair, + _Other=std::_Tree_node,void *>, + _Objty=std::pair + ] + C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\include\xmemory0(995): note: 参见对正在编译的函数 模板 实例化“void std::allocator_traits<_Alloc>::construct<_Ty,std::pair>(std::allocator<_Other> &,_Objty *,std::pair &&)”的引用 + with + [ + _Alloc=std::allocator,void *>>, + _Ty=std::pair, + _Other=std::_Tree_node,void *>, + _Objty=std::pair + ] + C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\include\xtree(889): note: 参见对正在编译的函数 模板 实例化“void std::_Wrap_alloc>::construct<_Ty,std::pair>(_Ty *,std::pair &&)”的引用 + with + [ + _Other=std::_Tree_node,void *>, + _Ty=std::pair + ] + C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\include\xtree(887): note: 参见对正在编译的函数 模板 实例化“void std::_Wrap_alloc>::construct<_Ty,std::pair>(_Ty *,std::pair &&)”的引用 + with + [ + _Other=std::_Tree_node,void *>, + _Ty=std::pair + ] + C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\include\xtree(1076): note: 参见对正在编译的函数 模板 实例化“std::_Tree_node,void *> *std::_Tree_comp_alloc<_Traits>::_Buynode>(std::pair &&)”的引用 + with + [ + _Kty=wchar_t, + _Ty=int, + _Traits=std::_Tmap_traits,std::allocator>,false> + ] + C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\include\xtree(1076): note: 参见对正在编译的函数 模板 实例化“std::_Tree_node,void *> *std::_Tree_comp_alloc<_Traits>::_Buynode>(std::pair &&)”的引用 + with + [ + _Kty=wchar_t, + _Ty=int, + _Traits=std::_Tmap_traits,std::allocator>,false> + ] + C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\include\map(194): note: 参见对正在编译的函数 模板 实例化“std::pair>>>,bool> std::_Tree>::emplace>(std::pair &&)”的引用 + with + [ + _Kty=wchar_t, + _Ty=int, + _Pr=std::less, + _Alloc=std::allocator> + ] + C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\include\map(194): note: 参见对正在编译的函数 模板 实例化“std::pair>>>,bool> 
std::_Tree>::emplace>(std::pair &&)”的引用 + with + [ + _Kty=wchar_t, + _Ty=int, + _Pr=std::less, + _Alloc=std::allocator> + ] + ocr_test.cpp(292): note: 参见对正在编译的函数 模板 实例化“std::pair>>>,bool> std::map<_Kty,_Ty,std::less<_Kty>,std::allocator>>::insert,void>(_Valty &&)”的引用 + with + [ + _Kty=wchar_t, + _Ty=int, + _Valty=std::pair + ] + ocr_test.cpp(292): note: 参见对正在编译的函数 模板 实例化“std::pair>>>,bool> std::map<_Kty,_Ty,std::less<_Kty>,std::allocator>>::insert,void>(_Valty &&)”的引用 + with + [ + _Kty=wchar_t, + _Ty=int, + _Valty=std::pair + ] + 正在创建库 ..\..\tools_bin\ocr_test.lib 和对象 ..\..\tools_bin\ocr_test.exp + 正在生成代码 + 8 of 1358 functions ( 0.6%) were compiled, the rest were copied from previous compilation. + 0 functions were new in current compilation + 74 functions had inline decision re-evaluated but remain unchanged + 已完成代码的生成 + ocr_test.vcxproj -> D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\ocr_test\..\..\tools_bin\ocr_test.exe + ocr_test.vcxproj -> ..\..\tools_bin\ocr_test.pdb (Full PDB) diff --git a/ocr_test/x64/Release/ocr_test.obj b/ocr_test/x64/Release/ocr_test.obj new file mode 100644 index 0000000000000000000000000000000000000000..7477247228d6dab540cbb2db70148dcda5886e70 Binary files /dev/null and b/ocr_test/x64/Release/ocr_test.obj differ diff --git a/ocr_test/x64/Release/ocr_test.pch b/ocr_test/x64/Release/ocr_test.pch new file mode 100644 index 0000000000000000000000000000000000000000..fdffaa251b05f70a2417bc643af080619aba48fb Binary files /dev/null and b/ocr_test/x64/Release/ocr_test.pch differ diff --git a/ocr_test/x64/Release/ocr_test.tlog/CL.command.1.tlog b/ocr_test/x64/Release/ocr_test.tlog/CL.command.1.tlog new file mode 100644 index 0000000000000000000000000000000000000000..8eab66b6c430ab85bc684d083872bb704baefbbe Binary files /dev/null and b/ocr_test/x64/Release/ocr_test.tlog/CL.command.1.tlog differ diff --git a/ocr_test/x64/Release/ocr_test.tlog/CL.read.1.tlog 
b/ocr_test/x64/Release/ocr_test.tlog/CL.read.1.tlog new file mode 100644 index 0000000000000000000000000000000000000000..f2647023f0e1f5f7212b508f4bb9d3efef46827e Binary files /dev/null and b/ocr_test/x64/Release/ocr_test.tlog/CL.read.1.tlog differ diff --git a/ocr_test/x64/Release/ocr_test.tlog/CL.write.1.tlog b/ocr_test/x64/Release/ocr_test.tlog/CL.write.1.tlog new file mode 100644 index 0000000000000000000000000000000000000000..222fbb12f519a6a7939412f1f77e08b486334e03 Binary files /dev/null and b/ocr_test/x64/Release/ocr_test.tlog/CL.write.1.tlog differ diff --git a/ocr_test/x64/Release/ocr_test.tlog/link.command.1.tlog b/ocr_test/x64/Release/ocr_test.tlog/link.command.1.tlog new file mode 100644 index 0000000000000000000000000000000000000000..a8f7259f94b4474a9f949647727b2bde09a6c99c Binary files /dev/null and b/ocr_test/x64/Release/ocr_test.tlog/link.command.1.tlog differ diff --git a/ocr_test/x64/Release/ocr_test.tlog/link.read.1.tlog b/ocr_test/x64/Release/ocr_test.tlog/link.read.1.tlog new file mode 100644 index 0000000000000000000000000000000000000000..be9e5165baa29b60439da68369aa614fde32a051 Binary files /dev/null and b/ocr_test/x64/Release/ocr_test.tlog/link.read.1.tlog differ diff --git a/ocr_test/x64/Release/ocr_test.tlog/link.write.1.tlog b/ocr_test/x64/Release/ocr_test.tlog/link.write.1.tlog new file mode 100644 index 0000000000000000000000000000000000000000..d21529c5f2a10b5d98012faf95e9ba09f60f31ca Binary files /dev/null and b/ocr_test/x64/Release/ocr_test.tlog/link.write.1.tlog differ diff --git a/ocr_test/x64/Release/ocr_test.tlog/ocr_test.lastbuildstate b/ocr_test/x64/Release/ocr_test.tlog/ocr_test.lastbuildstate new file mode 100644 index 0000000000000000000000000000000000000000..bc88a62314826d45ac78c7c5482fe97297237b8f --- /dev/null +++ b/ocr_test/x64/Release/ocr_test.tlog/ocr_test.lastbuildstate @@ -0,0 +1,2 @@ 
+#TargetFrameworkVersion=v4.0:PlatformToolSet=v140:EnableManagedIncrementalBuild=false:VCToolArchitecture=Native32Bit:WindowsTargetPlatformVersion=8.1 +Release|x64|D:\ocr\plate_card_BLSTM\caffe_ocr-master_CUDN7_CUDA9.2_vs2014_WM_plateCard\caffe-vsproj\| diff --git a/ocr_test/x64/Release/ocr_test.tlog/ocr_test.write.1u.tlog b/ocr_test/x64/Release/ocr_test.tlog/ocr_test.write.1u.tlog new file mode 100644 index 0000000000000000000000000000000000000000..4e1a8aebf1f8f9388525186ff7c11a379f91e046 Binary files /dev/null and b/ocr_test/x64/Release/ocr_test.tlog/ocr_test.write.1u.tlog differ diff --git a/ocr_test/x64/Release/stdafx.obj b/ocr_test/x64/Release/stdafx.obj new file mode 100644 index 0000000000000000000000000000000000000000..4b3bb314269640474738bddbd2474a2c472eb46d Binary files /dev/null and b/ocr_test/x64/Release/stdafx.obj differ diff --git a/ocr_test/x64/Release/vc140.pdb b/ocr_test/x64/Release/vc140.pdb new file mode 100644 index 0000000000000000000000000000000000000000..b3ddefefbf3ff04accd81f0a106cb4f33f325aa4 Binary files /dev/null and b/ocr_test/x64/Release/vc140.pdb differ diff --git "a/result_plateCard/QQ\345\233\276\347\211\20720180529195834.png" "b/result_plateCard/QQ\345\233\276\347\211\20720180529195834.png" new file mode 100644 index 0000000000000000000000000000000000000000..07aaebdae213df57a790ebab4335266c40d7fc6c Binary files /dev/null and "b/result_plateCard/QQ\345\233\276\347\211\20720180529195834.png" differ diff --git "a/result_plateCard/QQ\345\233\276\347\211\20720180529195858.png" "b/result_plateCard/QQ\345\233\276\347\211\20720180529195858.png" new file mode 100644 index 0000000000000000000000000000000000000000..48e76ddf66f94cf6e1e54462222c3c05c8f8ff27 Binary files /dev/null and "b/result_plateCard/QQ\345\233\276\347\211\20720180529195858.png" differ diff --git "a/result_plateCard/QQ\345\233\276\347\211\20720180529195903.png" "b/result_plateCard/QQ\345\233\276\347\211\20720180529195903.png" new file mode 100644 index 
0000000000000000000000000000000000000000..489b1f9bb0a46f6b3780ab8abe9f4bc7bef3b171 Binary files /dev/null and "b/result_plateCard/QQ\345\233\276\347\211\20720180529195903.png" differ diff --git "a/result_plateCard/QQ\345\233\276\347\211\20720180529195908.png" "b/result_plateCard/QQ\345\233\276\347\211\20720180529195908.png" new file mode 100644 index 0000000000000000000000000000000000000000..55e83cd829f7509107c254f318ec4234d9d3b30a Binary files /dev/null and "b/result_plateCard/QQ\345\233\276\347\211\20720180529195908.png" differ diff --git "a/result_plateCard/QQ\345\233\276\347\211\20720180529195912.png" "b/result_plateCard/QQ\345\233\276\347\211\20720180529195912.png" new file mode 100644 index 0000000000000000000000000000000000000000..cd0ca43d02a6d2418b6d19d761abd0225448bdfa Binary files /dev/null and "b/result_plateCard/QQ\345\233\276\347\211\20720180529195912.png" differ diff --git "a/result_plateCard/QQ\345\233\276\347\211\20720180529195916.png" "b/result_plateCard/QQ\345\233\276\347\211\20720180529195916.png" new file mode 100644 index 0000000000000000000000000000000000000000..98d82d31d3cdf7529d940ccd90d1178d6c39fc9c Binary files /dev/null and "b/result_plateCard/QQ\345\233\276\347\211\20720180529195916.png" differ diff --git "a/result_plateCard/QQ\345\233\276\347\211\20720180529195919.png" "b/result_plateCard/QQ\345\233\276\347\211\20720180529195919.png" new file mode 100644 index 0000000000000000000000000000000000000000..69a8735c3eb226b572703bd9026db1b5819d1564 Binary files /dev/null and "b/result_plateCard/QQ\345\233\276\347\211\20720180529195919.png" differ diff --git "a/result_plateCard/QQ\345\233\276\347\211\20720180529195923.png" "b/result_plateCard/QQ\345\233\276\347\211\20720180529195923.png" new file mode 100644 index 0000000000000000000000000000000000000000..844bbccabc12c5eeb6a2950fcc72cb19acfaac25 Binary files /dev/null and "b/result_plateCard/QQ\345\233\276\347\211\20720180529195923.png" differ diff --git 
"a/result_plateCard/QQ\345\233\276\347\211\20720180529195927.png" "b/result_plateCard/QQ\345\233\276\347\211\20720180529195927.png" new file mode 100644 index 0000000000000000000000000000000000000000..5e120f60539846dcd2dbf6f38b1678ed19640d5a Binary files /dev/null and "b/result_plateCard/QQ\345\233\276\347\211\20720180529195927.png" differ diff --git "a/result_plateCard/\345\276\256\344\277\241\346\210\252\345\233\276_20180530112203.png" "b/result_plateCard/\345\276\256\344\277\241\346\210\252\345\233\276_20180530112203.png" new file mode 100644 index 0000000000000000000000000000000000000000..7dc1fe63a23dffce9a9019172f662d0c97395df0 Binary files /dev/null and "b/result_plateCard/\345\276\256\344\277\241\346\210\252\345\233\276_20180530112203.png" differ