[Artoolkit] kpmMatching & Tracking of nftSimple
1. kpmMatching thread
main() -->
loadNFTData() -->
trackingInitInit() -->
In static void *trackingInitMain( THREAD_HANDLE_T *threadHandle )
/* Main loop of the tracking-init (kpmMatching) worker thread.
 * NOTE(review): excerpt only — the enclosing trackingInitMain() signature and
 * the declarations of kpmHandle, imagePtr, kpmResult, kpmResultNum, err, i, j, k
 * live outside this quoted fragment. */
for(;;)
{
if( threadStartWait(threadHandle) < 0 ) break; /* block until client signals start; <0 means quit */
kpmMatching(kpmHandle, imagePtr); /* run key-point matching on the current frame */
trackingInitHandle->flag = 0; /* no usable pose found yet */
for( i = 0; i < kpmResultNum; i++ )
{
if( kpmResult[i].camPoseF != 0 ) continue; /* skip pages without a valid camera pose */
ARLOGd("kpmGetPose OK.\n");
if( trackingInitHandle->flag == 0 || err > kpmResult[i].error )
{ // Take the first or best result.
trackingInitHandle->flag = 1;
trackingInitHandle->page = kpmResult[i].pageNo;
for (j = 0; j < 3; j++) for (k = 0; k < 4; k++) trackingInitHandle->trans[j][k] = kpmResult[i].camPose[j][k];
err = kpmResult[i].error; /* remember lowest error seen so far */
}
}
threadEndSignal(threadHandle); // wake the client waiting on cond2 --> Next
}
pthread_cond_signal 函数的作用是发送一个信号给另外一个正在处于阻塞等待状态的线程,使其脱离阻塞状态,继续执行。如果没有线程处在阻塞等待状态,pthread_cond_signal也会成功返回。
/* Mark this worker's job as finished and wake the client blocked in
 * threadEndWait(). The flag updates and the cond2 signal happen under
 * flag->mut so the waiting thread cannot observe a half-updated state;
 * pthread_cond_signal succeeds even when no thread is currently waiting.
 * Always returns 0. */
int threadEndSignal( THREAD_HANDLE_T *flag )
{
pthread_mutex_lock(&(flag->mut));
flag->endF = 1;  /* worker completed */
flag->busyF = 0; /* worker no longer busy */
pthread_cond_signal(&(flag->cond2));
pthread_mutex_unlock(&(flag->mut));
return 0;
}
精华全在这里,地位相当于Android的NDK: "./ARToolKit5-bin-5.3.2r1-Linux-x86_64/lib/SRC/KPM/"
unsw@unsw-UX303UB$ pwd /home/unsw/Android/ARToolKit5-bin-5.3.2r1-Linux-x86_64/lib/SRC/KPM unsw@unsw-UX303UB$ unsw@unsw-UX303UB$ unsw@unsw-UX303UB$ _cmd-wc-cpp 86 ./FreakMatcher/framework/logger.cpp 229 ./FreakMatcher/framework/image.cpp 96 ./FreakMatcher/framework/timers.cpp 56 ./FreakMatcher/framework/date_time.cpp 262 ./FreakMatcher/unsupported/test/polynomialsolver.cpp 74 ./FreakMatcher/unsupported/test/alignedvector3.cpp 237 ./FreakMatcher/unsupported/test/BVH.cpp 161 ./FreakMatcher/unsupported/test/matrix_exponential.cpp 140 ./FreakMatcher/unsupported/test/sparse_llt.cpp 188 ./FreakMatcher/unsupported/test/matrix_function.cpp 175 ./FreakMatcher/unsupported/test/sparse_ldlt.cpp 280 ./FreakMatcher/unsupported/test/FFTW.cpp 1861 ./FreakMatcher/unsupported/test/NonLinearOptimization.cpp 114 ./FreakMatcher/unsupported/test/NumericalDiff.cpp 5703 ./FreakMatcher/unsupported/test/mpreal/dlmalloc.c 507 ./FreakMatcher/unsupported/test/mpreal/mpreal.cpp 128 ./FreakMatcher/unsupported/test/polynomialutils.cpp 154 ./FreakMatcher/unsupported/test/sparse_extra.cpp 352 ./FreakMatcher/unsupported/test/openglsupport.cpp 2 ./FreakMatcher/unsupported/test/FFT.cpp 164 ./FreakMatcher/unsupported/test/autodiff.cpp 113 ./FreakMatcher/unsupported/test/sparse_lu.cpp 141 ./FreakMatcher/unsupported/test/forward_adolc.cpp 47 ./FreakMatcher/unsupported/test/mpreal_support.cpp 52 ./FreakMatcher/unsupported/doc/examples/BVH_Example.cpp 53 ./FreakMatcher/unsupported/doc/examples/PolynomialSolver1.cpp 20 ./FreakMatcher/unsupported/doc/examples/MatrixSine.cpp 16 ./FreakMatcher/unsupported/doc/examples/MatrixExponential.cpp 118 ./FreakMatcher/unsupported/doc/examples/FFT.cpp 23 ./FreakMatcher/unsupported/doc/examples/MatrixFunction.cpp 20 ./FreakMatcher/unsupported/doc/examples/MatrixSinh.cpp 20 ./FreakMatcher/unsupported/doc/examples/PolynomialUtils1.cpp 190 ./FreakMatcher/matchers/freak.cpp 235 ./FreakMatcher/matchers/hough_similarity_voting.cpp 67 ./FreakMatcher/matchers/feature_store_io.cpp 
68 ./FreakMatcher/matchers/visual_database_io.cpp 208 ./FreakMatcher/detectors/gradients.cpp 518 ./FreakMatcher/detectors/pyramid.cpp 671 ./FreakMatcher/detectors/DoG_scale_invariant_detector.cpp 246 ./FreakMatcher/detectors/orientation_assignment.cpp 647 ./FreakMatcher/detectors/harris.cpp 385 ./FreakMatcher/detectors/gaussian_scale_space_pyramid.cpp 60 ./FreakMatcher/math/math_io.cpp 159 ./FreakMatcher/facade/visual_database_facade.cpp 66 ./kpmFopen.c 659 ./kpmRefDataSet.cpp 282 ./kpmHandle.cpp 103 ./kpmResult.cpp 1224 ./kpmUtil.cpp 793 ./kpmMatching.cpp 18173 total
kpmMatching.cpp
/* Internal state of the key-point matcher (kpmMatching.cpp).
 * BINARY_FEATURE selects FREAK binary descriptors; otherwise SURF + approximate-NN. */
1 struct _KpmHandle {
2 #if !BINARY_FEATURE
3 SurfSubHandleT *surfHandle; // SURF feature extractor
4 void *ann2; // stored untyped; cast to CAnnMatch2* inside kpmMatching()
5 #else
6 vision::VisualDatabaseFacade *freakMatcher; // FREAK binary-feature matcher
7 #endif
8
9 ARParamLT *cparamLT; // camera-parameter lookup table (may be NULL — then no undistortion)
10 int poseMode; // KpmPose6DOF selects full pose; otherwise homography path
11 int xsize, ysize; // input image size in pixels
12 AR_PIXEL_FORMAT pixFormat; // input pixel format
13 KPM_PROC_MODE procMode; // processing scale: full / two-third / half / one-third / quarter
14 int detectedMaxFeature;
15 #if !BINARY_FEATURE
16 int surfThreadNum;
17 #endif
18
19 KpmRefDataSet refDataSet; // reference (trained) key points, per page
20 KpmInputDataSet inDataSet; // key points extracted from the current frame
21 #if !BINARY_FEATURE
22 KpmMatchResult preRANSAC; // candidate matches before RANSAC filtering
23 KpmMatchResult aftRANSAC; // inlier matches after RANSAC
24 #endif
25
26 #if !BINARY_FEATURE
27 KpmSkipRegionSet skipRegion; // regions excluded from feature extraction (cleared each frame)
28 #endif
29
30 KpmResult *result; // per-page matching results (camPoseF, camPose, error, ...)
31 int resultNum;
32 int pageIDs[DB_IMAGE_MAX]; // maps matcher image id -> page number (binary-feature path)
33 };
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/* kpmMatching(): match key points of one camera frame against the reference
 * data set and, per page, estimate the camera pose.
 * Pipeline: (1) convert the frame to grayscale unless it already is,
 * (2) extract features (FREAK query or SURF), (3) undistort/rescale each
 * key-point position into ideal full-size coordinates according to procMode,
 * (4) match against the reference set, filter with RANSAC (SURF path), and
 * compute pose (6DOF or homography) per page into kpmHandle->result[].
 * Returns 0 on success, -1 on NULL arguments or grayscale-conversion failure.
 * Per-page validity is reported via result[page].camPoseF (0 = pose valid). */
1 int kpmMatching( KpmHandle *kpmHandle, ARUint8 *inImage )
2 {
3 int xsize, ysize;
4 int xsize2, ysize2;
5 int procMode;
6 ARUint8 *inImageBW;
7 int i;
8 #if !BINARY_FEATURE
9 FeatureVector featureVector;
10 int *inlierIndex;
11 CorspMap preRANSAC;
12 int inlierNum;
13 CAnnMatch2 *ann2;
14 int *annMatch2;
15 int knn;
16 float h[3][3];
17 int j;
18 #endif
19 int ret;
20
21 if (!kpmHandle || !inImage) {
22 ARLOGe("kpmMatching(): NULL kpmHandle/inImage.\n");
23 return -1;
24 }
25
26 xsize = kpmHandle->xsize;
27 ysize = kpmHandle->ysize;
28 procMode = kpmHandle->procMode;
29
30 if (procMode == KpmProcFullSize && (kpmHandle->pixFormat == AR_PIXEL_FORMAT_MONO || kpmHandle->pixFormat == AR_PIXEL_FORMAT_420v || kpmHandle->pixFormat == AR_PIXEL_FORMAT_420f || kpmHandle->pixFormat == AR_PIXEL_FORMAT_NV21)) {
31 inImageBW = inImage; // frame is already (or begins with) a luma plane: use it directly
32 } else {
33 inImageBW = kpmUtilGenBWImage( inImage, kpmHandle->pixFormat, xsize, ysize, procMode, &xsize2, &ysize2 );
34 if( inImageBW == NULL ) return -1;
35 }
36
37 #if BINARY_FEATURE
38 kpmHandle->freakMatcher->query(inImageBW, xsize ,ysize);
39 kpmHandle->inDataSet.num = (int)kpmHandle->freakMatcher->getQueryFeaturePoints().size();
40 #else
41 surfSubExtractFeaturePoint( kpmHandle->surfHandle, inImageBW, kpmHandle->skipRegion.region, kpmHandle->skipRegion.regionNum );
42 kpmHandle->skipRegion.regionNum = 0; // skip regions are consumed; reset for the next frame
43 kpmHandle->inDataSet.num = featureVector.num = surfSubGetFeaturePointNum( kpmHandle->surfHandle );
44 #endif
45
46 if( kpmHandle->inDataSet.num != 0 ) {
47 if( kpmHandle->inDataSet.coord != NULL ) free(kpmHandle->inDataSet.coord); // drop previous frame's buffers before reallocating
48 #if !BINARY_FEATURE
49 if( kpmHandle->preRANSAC.match != NULL ) free(kpmHandle->preRANSAC.match);
50 if( kpmHandle->aftRANSAC.match != NULL ) free(kpmHandle->aftRANSAC.match);
51 #endif
52 arMalloc( kpmHandle->inDataSet.coord, KpmCoord2D, kpmHandle->inDataSet.num );
53 #if !BINARY_FEATURE
54 arMalloc( kpmHandle->preRANSAC.match, KpmMatchData, kpmHandle->inDataSet.num );
55 arMalloc( kpmHandle->aftRANSAC.match, KpmMatchData, kpmHandle->inDataSet.num );
56 #endif
57 #if BINARY_FEATURE
58 #else
59 arMalloc( featureVector.sf, SurfFeature, kpmHandle->inDataSet.num );
60 arMalloc( preRANSAC.mp, MatchPoint, kpmHandle->inDataSet.num );
61 arMalloc( inlierIndex, int, kpmHandle->inDataSet.num );
62
63 knn = 1;
64 arMalloc( annMatch2, int, kpmHandle->inDataSet.num*knn);
65 #endif
66
67 #if BINARY_FEATURE
68 const std::vector<vision::FeaturePoint>& points = kpmHandle->freakMatcher->getQueryFeaturePoints();
69 //const std::vector<unsigned char>& descriptors = kpmHandle->freakMatcher->getQueryDescriptors();
70 #endif
71 if( procMode == KpmProcFullSize ) {
72 for( i = 0 ; i < kpmHandle->inDataSet.num; i++ ) { // loop over every keypoint: copy its descriptor and undistort its position
73
74 #if BINARY_FEATURE
75 float x = points[i].x, y = points[i].y;
76 #else
77 float x, y, *desc;
78 surfSubGetFeaturePosition( kpmHandle->surfHandle, i, &x, &y );
79 desc = surfSubGetFeatureDescPtr( kpmHandle->surfHandle, i );
80 for( j = 0; j < SURF_SUB_DIMENSION; j++ ) {
81 featureVector.sf[i].v[j] = desc[j];
82 }
83 featureVector.sf[i].l = surfSubGetFeatureSign( kpmHandle->surfHandle, i );
84 #endif
85 if( kpmHandle->cparamLT != NULL ) {
86 arParamObserv2IdealLTf( &(kpmHandle->cparamLT->paramLTf), x, y, &(kpmHandle->inDataSet.coord[i].x), &(kpmHandle->inDataSet.coord[i].y) );
87 }
88 else {
89 kpmHandle->inDataSet.coord[i].x = x;
90 kpmHandle->inDataSet.coord[i].y = y;
91 }
92 }
93 }
94 else if( procMode == KpmProcTwoThirdSize ) { // coordinates scaled back up by 1.5
95 for( i = 0 ; i < kpmHandle->inDataSet.num; i++ ) {
96 #if BINARY_FEATURE
97 float x = points[i].x, y = points[i].y;
98 #else
99 float x, y, *desc;
100 surfSubGetFeaturePosition( kpmHandle->surfHandle, i, &x, &y );
101 desc = surfSubGetFeatureDescPtr( kpmHandle->surfHandle, i );
102 for( j = 0; j < SURF_SUB_DIMENSION; j++ ) {
103 featureVector.sf[i].v[j] = desc[j];
104 }
105 featureVector.sf[i].l = surfSubGetFeatureSign( kpmHandle->surfHandle, i );
106 #endif
107 if( kpmHandle->cparamLT != NULL ) {
108 arParamObserv2IdealLTf( &(kpmHandle->cparamLT->paramLTf), x*1.5f, y*1.5f, &(kpmHandle->inDataSet.coord[i].x), &(kpmHandle->inDataSet.coord[i].y) );
109 }
110 else {
111 kpmHandle->inDataSet.coord[i].x = x*1.5f;
112 kpmHandle->inDataSet.coord[i].y = y*1.5f;
113 }
114 }
115 }
116 else if( procMode == KpmProcHalfSize ) { // coordinates scaled back up by 2.0
117 for( i = 0 ; i < kpmHandle->inDataSet.num; i++ ) {
118 #if BINARY_FEATURE
119 float x = points[i].x, y = points[i].y;
120 #else
121 float x, y, *desc;
122 surfSubGetFeaturePosition( kpmHandle->surfHandle, i, &x, &y );
123 desc = surfSubGetFeatureDescPtr( kpmHandle->surfHandle, i );
124 for( j = 0; j < SURF_SUB_DIMENSION; j++ ) {
125 featureVector.sf[i].v[j] = desc[j];
126 }
127 featureVector.sf[i].l = surfSubGetFeatureSign( kpmHandle->surfHandle, i );
128 #endif
129 if( kpmHandle->cparamLT != NULL ) {
130 arParamObserv2IdealLTf( &(kpmHandle->cparamLT->paramLTf), x*2.0f, y*2.0f, &(kpmHandle->inDataSet.coord[i].x), &(kpmHandle->inDataSet.coord[i].y) );
131 }
132 else {
133 kpmHandle->inDataSet.coord[i].x = x*2.0f;
134 kpmHandle->inDataSet.coord[i].y = y*2.0f;
135 }
136 }
137 }
138 else if( procMode == KpmProcOneThirdSize ) { // coordinates scaled back up by 3.0
139 for( i = 0 ; i < kpmHandle->inDataSet.num; i++ ) {
140 #if BINARY_FEATURE
141 float x = points[i].x, y = points[i].y;
142 #else
143 float x, y, *desc;
144 surfSubGetFeaturePosition( kpmHandle->surfHandle, i, &x, &y );
145 desc = surfSubGetFeatureDescPtr( kpmHandle->surfHandle, i );
146 for( j = 0; j < SURF_SUB_DIMENSION; j++ ) {
147 featureVector.sf[i].v[j] = desc[j];
148 }
149 featureVector.sf[i].l = surfSubGetFeatureSign( kpmHandle->surfHandle, i );
150 #endif
151 if( kpmHandle->cparamLT != NULL ) {
152 arParamObserv2IdealLTf( &(kpmHandle->cparamLT->paramLTf), x*3.0f, y*3.0f, &(kpmHandle->inDataSet.coord[i].x), &(kpmHandle->inDataSet.coord[i].y) );
153 }
154 else {
155 kpmHandle->inDataSet.coord[i].x = x*3.0f;
156 kpmHandle->inDataSet.coord[i].y = y*3.0f;
157 }
158 }
159 }
160 else { // procMode == KpmProcQuatSize
161 for( i = 0 ; i < kpmHandle->inDataSet.num; i++ ) {
162 #if BINARY_FEATURE
163 float x = points[i].x, y = points[i].y;
164 #else
165 float x, y, *desc;
166 surfSubGetFeaturePosition( kpmHandle->surfHandle, i, &x, &y );
167 desc = surfSubGetFeatureDescPtr( kpmHandle->surfHandle, i );
168 for( j = 0; j < SURF_SUB_DIMENSION; j++ ) {
169 featureVector.sf[i].v[j] = desc[j];
170 }
171 featureVector.sf[i].l = surfSubGetFeatureSign( kpmHandle->surfHandle, i );
172 #endif
173 if( kpmHandle->cparamLT != NULL ) {
174 arParamObserv2IdealLTf( &(kpmHandle->cparamLT->paramLTf), x*4.0f, y*4.0f, &(kpmHandle->inDataSet.coord[i].x), &(kpmHandle->inDataSet.coord[i].y) );
175 }
176 else {
177 kpmHandle->inDataSet.coord[i].x = x*4.0f;
178 kpmHandle->inDataSet.coord[i].y = y*4.0f;
179 }
180 }
181 }
182
183 #if !BINARY_FEATURE
184 ann2 = (CAnnMatch2*)kpmHandle->ann2;
185 ann2->Match(&featureVector, knn, annMatch2); // knn = 1: only the single nearest neighbour is requested
186 for(int pageLoop = 0; pageLoop < kpmHandle->resultNum; pageLoop++ ) {
187 kpmHandle->preRANSAC.num = 0;
188 kpmHandle->aftRANSAC.num = 0;
189
190 kpmHandle->result[pageLoop].pageNo = kpmHandle->refDataSet.pageInfo[pageLoop].pageNo;
191 kpmHandle->result[pageLoop].camPoseF = -1;
192 if( kpmHandle->result[pageLoop].skipF ) continue;
193
194 int featureNum = 0;
195 int *annMatch2Ptr = annMatch2;
196 int pageNo = kpmHandle->refDataSet.pageInfo[pageLoop].pageNo;
197 for( i = 0; i < kpmHandle->inDataSet.num; i++ ) { // collect NN matches that belong to this page
198 for( j = 0; j < knn; j++ ) {
199 if( *annMatch2Ptr >= 0 && kpmHandle->refDataSet.refPoint[*annMatch2Ptr].pageNo == pageNo ) {
200 kpmHandle->preRANSAC.match[featureNum].inIndex = i;
201 kpmHandle->preRANSAC.match[featureNum].refIndex = *annMatch2Ptr;
202 preRANSAC.mp[featureNum].x1 = kpmHandle->inDataSet.coord[i].x;
203 preRANSAC.mp[featureNum].y1 = kpmHandle->inDataSet.coord[i].y;
204 preRANSAC.mp[featureNum].x2 = kpmHandle->refDataSet.refPoint[*annMatch2Ptr].coord3D.x;
205 preRANSAC.mp[featureNum].y2 = kpmHandle->refDataSet.refPoint[*annMatch2Ptr].coord3D.y;
206 featureNum++;
207 annMatch2Ptr += knn-j; // skip this input point's remaining neighbours
208 break;
209 }
210 annMatch2Ptr++;
211 }
212 }
213 //printf("Page[%d] %d\n", pageLoop, featureNum);
214 preRANSAC.num = featureNum;
215 if( featureNum < 6 ) continue;
216
217 if( kpmRansacHomograhyEstimation(&preRANSAC, inlierIndex, &inlierNum, h) < 0 ) { // RANSAC homography fit over the candidate matches
218 inlierNum = 0;
219 }
220 //printf(" --> page[%d] %d pre:%3d, aft:%3d\n", pageLoop, kpmHandle->inDataSet.num, preRANSAC.num, inlierNum);
221 if( inlierNum < 6 ) continue;
222 // At least 6 RANSAC inliers are required for a valid pose.
223 kpmHandle->preRANSAC.num = preRANSAC.num;
224 kpmHandle->aftRANSAC.num = inlierNum;
225 for( i = 0; i < inlierNum; i++ ) {
226 kpmHandle->aftRANSAC.match[i].inIndex = kpmHandle->preRANSAC.match[inlierIndex[i]].inIndex;
227 kpmHandle->aftRANSAC.match[i].refIndex = kpmHandle->preRANSAC.match[inlierIndex[i]].refIndex;
228 }
229 //printf(" ---> %d %d %d\n", kpmHandle->inDataSet.num, kpmHandle->preRANSAC.num, kpmHandle->aftRANSAC.num);
230 if( kpmHandle->poseMode == KpmPose6DOF ) {
231 //printf("----- Page %d ------\n", pageLoop);
232 ret = kpmUtilGetPose(kpmHandle->cparamLT, &(kpmHandle->aftRANSAC), &(kpmHandle->refDataSet), &(kpmHandle->inDataSet),
233 kpmHandle->result[pageLoop].camPose, &(kpmHandle->result[pageLoop].error) );
234 ARLOGi("Pose - %s",arrayToString2(kpmHandle->result[pageLoop].camPose).c_str());
235 //printf("----- End. ------\n");
236 }
237 else {
238 ret = kpmUtilGetPoseHomography(&(kpmHandle->aftRANSAC), &(kpmHandle->refDataSet), &(kpmHandle->inDataSet),
239 kpmHandle->result[pageLoop].camPose, &(kpmHandle->result[pageLoop].error) );
240 }
241 if( ret == 0 ) {
242 kpmHandle->result[pageLoop].camPoseF = 0;
243 kpmHandle->result[pageLoop].inlierNum = inlierNum;
244 ARLOGi("Page[%d] pre:%3d, aft:%3d, error = %f\n", pageLoop, preRANSAC.num, inlierNum, kpmHandle->result[pageLoop].error);
245 }
246 }
247 free(annMatch2);
248 #else
249 for (int pageLoop = 0; pageLoop < kpmHandle->resultNum; pageLoop++) {
250
251 kpmHandle->result[pageLoop].pageNo = kpmHandle->refDataSet.pageInfo[pageLoop].pageNo;
252 kpmHandle->result[pageLoop].camPoseF = -1;
253 if( kpmHandle->result[pageLoop].skipF ) continue;
254
255
256 const vision::matches_t& matches = kpmHandle->freakMatcher->inliers();
257 int matched_image_id = kpmHandle->freakMatcher->matchedId();
258 if (matched_image_id < 0) continue;
259
260 ret = kpmUtilGetPose_binary(kpmHandle->cparamLT,
261 matches ,
262 kpmHandle->freakMatcher->get3DFeaturePoints(matched_image_id),
263 kpmHandle->freakMatcher->getQueryFeaturePoints(),
264 kpmHandle->result[pageLoop].camPose,
265 &(kpmHandle->result[pageLoop].error) );
266 //ARLOGi("Pose (freak) - %s",arrayToString2(kpmHandle->result[pageLoop].camPose).c_str());
267 if( ret == 0 ) {
268 kpmHandle->result[pageLoop].camPoseF = 0;
269 kpmHandle->result[pageLoop].inlierNum = (int)matches.size();
270 kpmHandle->result[pageLoop].pageNo = kpmHandle->pageIDs[matched_image_id];
271 ARLOGi("Page[%d] pre:%3d, aft:%3d, error = %f\n", pageLoop, (int)matches.size(), (int)matches.size(), kpmHandle->result[pageLoop].error);
272 }
273 }
274 #endif
275 #if !BINARY_FEATURE
276 free(featureVector.sf);
277 free(preRANSAC.mp);
278 free(inlierIndex);
279 #endif
280 }
281 else { // no features extracted: invalidate every page's pose for this frame
282 #if !BINARY_FEATURE
283 kpmHandle->preRANSAC.num = 0;
284 kpmHandle->aftRANSAC.num = 0;
285 #endif
286 for( i = 0; i < kpmHandle->resultNum; i++ ) {
287 kpmHandle->result[i].camPoseF = -1;
288 }
289 }
290
291 for( i = 0; i < kpmHandle->resultNum; i++ ) kpmHandle->result[i].skipF = 0;
292
293 if (inImageBW != inImage) free( inImageBW ); // free only if we allocated a grayscale copy
294
295 return 0;
296 }
Therefore, kpmHandle gets camPose.
2. main thread
static void mainLoop(void):上述 camPose 由 (2) 接收。接下来再分析 track object 是怎么一回事。
主线程:
1 int ar2Tracking( AR2HandleT *ar2Handle, AR2SurfaceSetT *surfaceSet, ARUint8 *dataPtr, float trans[3][4], float *err ) 2 { 3 AR2TemplateCandidateT *candidatePtr; 4 AR2TemplateCandidateT *cp[AR2_THREAD_MAX]; 5 #if AR2_CAPABLE_ADAPTIVE_TEMPLATE 6 float aveBlur; 7 #endif 8 int num, num2; 9 int i, j, k; 10 11 if (!ar2Handle || !surfaceSet || !dataPtr || !trans || !err) return (-1); 12 13 if( surfaceSet->contNum <= 0 ) { 14 ARLOGd("ar2Tracking() error: ar2SetInitTrans() must be called first.\n"); 15 return -2; 16 } 17 18 *err = 0.0F; 19 20 for( i = 0; i < surfaceSet->num; i++ ) { 21 arUtilMatMulf( (const float (*)[4])surfaceSet->trans1, (const float (*)[4])surfaceSet->surface[i].trans, ar2Handle->wtrans1[i] ); 22 if( surfaceSet->contNum > 1 ) arUtilMatMulf( (const float (*)[4])surfaceSet->trans2, (const float (*)[4])surfaceSet->surface[i].trans, ar2Handle->wtrans2[i] ); 23 if( surfaceSet->contNum > 2 ) arUtilMatMulf( (const float (*)[4])surfaceSet->trans3, (const float (*)[4])surfaceSet->surface[i].trans, ar2Handle->wtrans3[i] ); 24 } 25 26 if( ar2Handle->trackingMode == AR2_TRACKING_6DOF ) { 27 extractVisibleFeatures(ar2Handle->cparamLT, ar2Handle->wtrans1, surfaceSet, ar2Handle->candidate, ar2Handle->candidate2); 28 } 29 else { 30 extractVisibleFeaturesHomography(ar2Handle->xsize, ar2Handle->ysize, ar2Handle->wtrans1, surfaceSet, ar2Handle->candidate, ar2Handle->candidate2); 31 } 32 33 candidatePtr = ar2Handle->candidate; 34 #if AR2_CAPABLE_ADAPTIVE_TEMPLATE 35 aveBlur = 0.0F; 36 #endif 37 i = 0; // Counts up to searchFeatureNum. 
38 num = 0; 39 while( i < ar2Handle->searchFeatureNum ) { 40 num2 = num; 41 for( j = 0; j < ar2Handle->threadNum; j++ ) { 42 if( i == ar2Handle->searchFeatureNum ) break; 43 44 k = ar2SelectTemplate( candidatePtr, surfaceSet->prevFeature, num2, ar2Handle->pos, ar2Handle->xsize, ar2Handle->ysize ); 45 if( k < 0 ) { 46 if( candidatePtr == ar2Handle->candidate ) { 47 candidatePtr = ar2Handle->candidate2; 48 k = ar2SelectTemplate( candidatePtr, surfaceSet->prevFeature, num2, ar2Handle->pos, ar2Handle->xsize, ar2Handle->ysize ); 49 if( k < 0 ) break; // PRL 2012-05-15: Give up if we can't select template from alternate candidate either. 50 } 51 else break; 52 } 53 54 cp[j] = &(candidatePtr[k]); 55 ar2Handle->pos[num2][0] = candidatePtr[k].sx; 56 ar2Handle->pos[num2][1] = candidatePtr[k].sy; 57 ar2Handle->arg[j].ar2Handle = ar2Handle; 58 ar2Handle->arg[j].surfaceSet = surfaceSet; 59 ar2Handle->arg[j].candidate = &(candidatePtr[k]); 60 ar2Handle->arg[j].dataPtr = dataPtr; 61 62 threadStartSignal( ar2Handle->threadHandle[j] ); // 激活一个线程,cond1:计算tracking result 63 num2++; 64 if( num2 == 5 ) num2 = num; 65 i++; 66 } 67 k = j; 68 if( k == 0 ) break; 69 70 for( j = 0; j < k; j++ ) { 71 threadEndWait( ar2Handle->threadHandle[j] ); // 等待被激活,cond2 72 73 if( ar2Handle->arg[j].ret == 0 && ar2Handle->arg[j].result.sim > ar2Handle->simThresh ) { 74 if( ar2Handle->trackingMode == AR2_TRACKING_6DOF ) { 75 #ifdef ARDOUBLE_IS_FLOAT 76 (ar2Handle->cparamLT->param.dist_factor, 77 ar2Handle->arg[j].result.pos2d[0], ar2Handle->arg[j].result.pos2d[1], 78 &ar2Handle->pos2d[num][0], &ar2Handle->pos2d[num][1], ar2Handle->cparamLT->param.dist_function_version); 79 #else 80 ARdouble pos2d0, pos2d1; 81 arParamObserv2Ideal(ar2Handle->cparamLT->param.dist_factor, 82 (ARdouble)(ar2Handle->arg[j].result.pos2d[0]), (ARdouble)(ar2Handle->arg[j].result.pos2d[1]), 83 &pos2d0, &pos2d1, ar2Handle->cparamLT->param.dist_function_version); 84 ar2Handle->pos2d[num][0] = (float)pos2d0; 85 ar2Handle->pos2d[num][1] 
= (float)pos2d1; 86 #endif 87 } 88 else { 89 ar2Handle->pos2d[num][0] = ar2Handle->arg[j].result.pos2d[0]; 90 ar2Handle->pos2d[num][1] = ar2Handle->arg[j].result.pos2d[1]; 91 } 92 ar2Handle->pos3d[num][0] = ar2Handle->arg[j].result.pos3d[0]; 93 ar2Handle->pos3d[num][1] = ar2Handle->arg[j].result.pos3d[1]; 94 ar2Handle->pos3d[num][2] = ar2Handle->arg[j].result.pos3d[2]; 95 ar2Handle->pos[num][0] = cp[j]->sx; 96 ar2Handle->pos[num][1] = cp[j]->sy; 97 ar2Handle->usedFeature[num].snum = cp[j]->snum; 98 ar2Handle->usedFeature[num].level = cp[j]->level; 99 ar2Handle->usedFeature[num].num = cp[j]->num; 100 ar2Handle->usedFeature[num].flag = 0; 101 #if AR2_CAPABLE_ADAPTIVE_TEMPLATE 102 aveBlur += ar2Handle->arg[j].result.blurLevel; 103 #endif 104 num++; 105 } 106 } 107 } 108 for( i = 0; i < num; i++ ) { 109 surfaceSet->prevFeature[i] = ar2Handle->usedFeature[i]; 110 } 111 surfaceSet->prevFeature[num].flag = -1; 112 //ARLOG("------\nNum = %d\n", num); 113 114 if( ar2Handle->trackingMode == AR2_TRACKING_6DOF ) { 115 if( num < 3 ) { 116 surfaceSet->contNum = 0; 117 return -3; 118 } 119 *err = ar2GetTransMat( ar2Handle->icpHandle, surfaceSet->trans1, ar2Handle->pos2d, ar2Handle->pos3d, num, trans, 0 ); 120 //ARLOG("outlier 0%%: err = %f, num = %d\n", *err, num); 121 if( *err > ar2Handle->trackingThresh ) { 122 icpSetInlierProbability( ar2Handle->icpHandle, 0.8F ); 123 *err = ar2GetTransMat( ar2Handle->icpHandle, trans, ar2Handle->pos2d, ar2Handle->pos3d, num, trans, 1 ); 124 //ARLOG("outlier 20%%: err = %f, num = %d\n", *err, num); 125 if( *err > ar2Handle->trackingThresh ) { 126 icpSetInlierProbability( ar2Handle->icpHandle, 0.6F ); 127 *err = ar2GetTransMat( ar2Handle->icpHandle, trans, ar2Handle->pos2d, ar2Handle->pos3d, num, trans, 1 ); 128 //ARLOG("outlier 60%%: err = %f, num = %d\n", *err, num); 129 if( *err > ar2Handle->trackingThresh ) { 130 icpSetInlierProbability( ar2Handle->icpHandle, 0.4F ); 131 *err = ar2GetTransMat( ar2Handle->icpHandle, trans, ar2Handle->pos2d, 
ar2Handle->pos3d, num, trans, 1 ); 132 //ARLOG("outlier 60%%: err = %f, num = %d\n", *err, num); 133 if( *err > ar2Handle->trackingThresh ) { 134 icpSetInlierProbability( ar2Handle->icpHandle, 0.0F ); 135 *err = ar2GetTransMat( ar2Handle->icpHandle, trans, ar2Handle->pos2d, ar2Handle->pos3d, num, trans, 1 ); 136 //ARLOG("outlier Max: err = %f, num = %d\n", *err, num); 137 if( *err > ar2Handle->trackingThresh ) { 138 surfaceSet->contNum = 0; 139 #if AR2_CAPABLE_ADAPTIVE_TEMPLATE 140 if( ar2Handle->blurMethod == AR2_ADAPTIVE_BLUR ) ar2Handle->blurLevel = AR2_DEFAULT_BLUR_LEVEL; // Reset the blurLevel. 141 #endif 142 return -4; 143 } 144 } 145 } 146 } 147 } 148 } 149 else { 150 if( num < 3 ) { 151 surfaceSet->contNum = 0; 152 return -3; 153 } 154 *err = ar2GetTransMatHomography( surfaceSet->trans1, ar2Handle->pos2d, ar2Handle->pos3d, num, trans, 0, 1.0F ); 155 //ARLOG("outlier 0%%: err = %f, num = %d\n", *err, num); 156 if( *err > ar2Handle->trackingThresh ) { 157 *err = ar2GetTransMatHomography( trans, ar2Handle->pos2d, ar2Handle->pos3d, num, trans, 1, 0.8F ); 158 //ARLOG("outlier 20%%: err = %f, num = %d\n", *err, num); 159 if( *err > ar2Handle->trackingThresh ) { 160 *err = ar2GetTransMatHomography( trans, ar2Handle->pos2d, ar2Handle->pos3d, num, trans, 1, 0.6F ); 161 //ARLOG("outlier 40%%: err = %f, num = %d\n", *err, num); 162 if( *err > ar2Handle->trackingThresh ) { 163 *err = ar2GetTransMatHomography( trans, ar2Handle->pos2d, ar2Handle->pos3d, num, trans, 1, 0.4F ); 164 //ARLOG("outlier 60%%: err = %f, num = %d\n", *err, num); 165 if( *err > ar2Handle->trackingThresh ) { 166 *err = ar2GetTransMatHomography( trans, ar2Handle->pos2d, ar2Handle->pos3d, num, trans, 1, 0.0F ); 167 //ARLOG("outlier Max: err = %f, num = %d\n", *err, num); 168 if( *err > ar2Handle->trackingThresh ) { 169 surfaceSet->contNum = 0; 170 #if AR2_CAPABLE_ADAPTIVE_TEMPLATE 171 if( ar2Handle->blurMethod == AR2_ADAPTIVE_BLUR ) ar2Handle->blurLevel = AR2_DEFAULT_BLUR_LEVEL; // Reset the 
blurLevel. 172 #endif 173 return -4; 174 } 175 } 176 } 177 } 178 } 179 } 180 181 #if AR2_CAPABLE_ADAPTIVE_TEMPLATE 182 if( ar2Handle->blurMethod == AR2_ADAPTIVE_BLUR ) { 183 aveBlur = aveBlur/num + 0.5F; 184 ar2Handle->blurLevel += (int)aveBlur - 1; 185 if( ar2Handle->blurLevel < 1 ) ar2Handle->blurLevel = 1; 186 if( ar2Handle->blurLevel >= AR2_BLUR_IMAGE_MAX-1 ) ar2Handle->blurLevel = AR2_BLUR_IMAGE_MAX-2; 187 } 188 #endif 189 190 surfaceSet->contNum++; 191 for( j = 0; j < 3; j++ ) { 192 for( i = 0; i < 4; i++ ) surfaceSet->trans3[j][i] = surfaceSet->trans2[j][i]; 193 } 194 for( j = 0; j < 3; j++ ) { 195 for( i = 0; i < 4; i++ ) surfaceSet->trans2[j][i] = surfaceSet->trans1[j][i]; 196 } 197 for( j = 0; j < 3; j++ ) { 198 for( i = 0; i < 4; i++ ) surfaceSet->trans1[j][i] = trans[j][i]; 199 } 200 201 return 0; 202 }
3. tracking thread
main() -->
initNFT() -->
ar2CreateHandle() -->
// Handshake state for one worker thread; used by threadStartSignal/threadStartWait
// and threadEndSignal/threadEndWait. All flags are read and written under 'mut'.
struct _THREAD_HANDLE_T {
int ID;
int startF; // 0 = no request pending, 1 = start please, 2 = quit please.
int endF; // 0 = worker not started or worker running, 1 = worker completed, 2 = worker will quit (exit).
int busyF; // 0 = worker not started or worker ended, 1 = worker busy.
//pthread_t thread;
pthread_mutex_t mut;
pthread_cond_t cond1; // Signals from client that startF has changed.
pthread_cond_t cond2; // Signals from worker that endF has changed.
void *arg; // Worker-specific argument (e.g. AR2Tracking2DParamT* for tracking threads).
};
// lib/src/ar2/Handle.c
static AR2HandleT *ar2CreateHandleSub( int pixFormat, int xsize, int ysize, int threadNum )
{
AR2HandleT *ar2Handle;
int i;
arMalloc(ar2Handle, AR2HandleT, 1);
ar2Handle->pixFormat = pixFormat;
ar2Handle->xsize = xsize;
ar2Handle->ysize = ysize;
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
ar2Handle->blurMethod = AR2_DEFAULT_BLUR_METHOD;
ar2Handle->blurLevel = AR2_DEFAULT_BLUR_LEVEL;
#endif
ar2Handle->searchSize = AR2_DEFAULT_SEARCH_SIZE;
ar2Handle->templateSize1 = AR2_DEFAULT_TS1;
ar2Handle->templateSize2 = AR2_DEFAULT_TS2;
ar2Handle->searchFeatureNum = AR2_DEFAULT_SEARCH_FEATURE_NUM;
if( ar2Handle->searchFeatureNum > AR2_SEARCH_FEATURE_MAX ) {
ar2Handle->searchFeatureNum = AR2_SEARCH_FEATURE_MAX;
}
ar2Handle->simThresh = AR2_DEFAULT_SIM_THRESH;
ar2Handle->trackingThresh = AR2_DEFAULT_TRACKING_THRESH;
if( threadNum == AR2_TRACKING_DEFAULT_THREAD_NUM ) {
threadNum = threadGetCPU();
}
if( threadNum < 1 ) {
threadNum = 1;
}
if( threadNum > AR2_THREAD_MAX ) {
threadNum = AR2_THREAD_MAX;
}
ar2Handle->threadNum = threadNum;
ARLOGi("Tracking thread = %d\n", threadNum);
for( i = 0; i < ar2Handle->threadNum; i++ ) {
arMalloc( ar2Handle->arg[i].mfImage, ARUint8, xsize*ysize );
ar2Handle->arg[i].templ = NULL;
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
ar2Handle->arg[i].templ2 = NULL;
#endif
ar2Handle->threadHandle[i] = threadInit(i, &(ar2Handle->arg[i]), ar2Tracking2d); // 单独一个线程做跟踪处理 --> next
}
return ar2Handle;
}
/* Worker-thread entry point for 2D template tracking. One instance runs per
 * thread created by ar2CreateHandleSub(). Each iteration blocks until the
 * client signals start, processes the candidate handed over through its
 * AR2Tracking2DParamT, then signals completion; threadStartWait() returning
 * < 0 is the quit request that ends the loop. */
1 void *ar2Tracking2d( THREAD_HANDLE_T *threadHandle )
2 {
3 AR2Tracking2DParamT *arg;
4 int ID;
5
6 arg = (AR2Tracking2DParamT *)threadGetArg(threadHandle);
7 ID = threadGetID(threadHandle);
8
9 ARLOGi("Start tracking_thread #%d.\n", ID);
10 for(;;) {
11 if( threadStartWait(threadHandle) < 0 ) break; /* returns < 0 on quit request */
12
13 #if AR2_CAPABLE_ADAPTIVE_TEMPLATE /* adaptive template? */
14 arg->ret = ar2Tracking2dSub( arg->ar2Handle, arg->surfaceSet, arg->candidate,
15 arg->dataPtr, arg->mfImage, &(arg->templ), &(arg->templ2), &(arg->result) );
16 #else
17 arg->ret = ar2Tracking2dSub( arg->ar2Handle, arg->surfaceSet, arg->candidate,
18 arg->dataPtr, arg->mfImage, &(arg->templ), &(arg->result) ); // --> next
19 #endif
20 threadEndSignal(threadHandle); /* wake the client waiting in threadEndWait() */
21 }
22 ARLOGi("End tracking_thread #%d.\n", ID);
23
24 return NULL;
25 }
/* Track one feature candidate in the current camera frame: build (or reuse)
 * its image template, derive the search window from up to three previous
 * poses, run template matching to find the best screen position, and output
 * the matched 2D point plus its 3D surface coordinate in *result.
 * Returns 0 on success; -1 if template generation fails, the template's
 * variance is below the SD threshold, or matching fails. */
1 #if AR2_CAPABLE_ADAPTIVE_TEMPLATE
2 static int ar2Tracking2dSub ( AR2HandleT *handle, AR2SurfaceSetT *surfaceSet, AR2TemplateCandidateT *candidate,
3 ARUint8 *dataPtr, ARUint8 *mfImage, AR2TemplateT **templ,
4 AR2Template2T **templ2, AR2Tracking2DResultT *result )
5 #else
6 static int ar2Tracking2dSub ( AR2HandleT *handle, AR2SurfaceSetT *surfaceSet, AR2TemplateCandidateT *candidate,
7 ARUint8 *dataPtr, ARUint8 *mfImage, AR2TemplateT **templ,
8 AR2Tracking2DResultT *result )
9 #endif
10 {
11 #if AR2_CAPABLE_ADAPTIVE_TEMPLATE
12 AR2Template2T *templ2;
13 #endif
14 int snum, level, fnum;
15 int search[3][2];
16 int bx, by;
17
18 snum = candidate->snum; /* surface index */
19 level = candidate->level; /* pyramid level of the feature set */
20 fnum = candidate->num; /* feature index within the level */
21
22 if( *templ == NULL ) *templ = ar2GenTemplate( handle->templateSize1, handle->templateSize2 ); /* lazily create this thread's template buffer */
23 #if AR2_CAPABLE_ADAPTIVE_TEMPLATE
24 if( *templ2 == NULL ) *templ2 = ar2GenTemplate2( handle->templateSize1, handle->templateSize2 );
25 #endif
26
27 #if AR2_CAPABLE_ADAPTIVE_TEMPLATE
28 if( handle->blurMethod == AR2_CONSTANT_BLUR ) {
29 if( ar2SetTemplateSub( handle->cparamLT,
30 (const float (*)[4])handle->wtrans1[snum],
31 surfaceSet->surface[snum].imageSet,
32 &(surfaceSet->surface[snum].featureSet->list[level]),
33 fnum,
34 handle->blurLevel,
35 *templ ) < 0 ) return -1;
36
37 if( (*templ)->vlen * (*templ)->vlen
38 < ((*templ)->xts1+(*templ)->xts2+1) * ((*templ)->yts1+(*templ)->yts2+1)
39 * AR2_DEFAULT_TRACKING_SD_THRESH * AR2_DEFAULT_TRACKING_SD_THRESH ) {
40 return -1; /* template too uniform to match reliably */
41 }
42 }
43 else {
44 if( ar2SetTemplate2Sub( handle->cparamLT,
45 (const float (*)[4])handle->wtrans1[snum],
46 surfaceSet->surface[snum].imageSet,
47 &(surfaceSet->surface[snum].featureSet->list[level]),
48 fnum,
49 handle->blurLevel,
50 *templ2 ) < 0 ) return -1;
51
52 if( (*templ2)->vlen[1] * (*templ2)->vlen[1]
53 < ((*templ2)->xts1+(*templ2)->xts2+1) * ((*templ2)->yts1+(*templ2)->yts2+1)
54 * AR2_DEFAULT_TRACKING_SD_THRESH * AR2_DEFAULT_TRACKING_SD_THRESH ) {
55 return -1;
56 }
57 }
58 #else
59 if( ar2SetTemplateSub( handle->cparamLT,
60 (const float (*)[4])handle->wtrans1[snum],
61 surfaceSet->surface[snum].imageSet,
62 &(surfaceSet->surface[snum].featureSet->list[level]),
63 fnum,
64 *templ ) < 0 ) return -1;
65
66 if( (*templ)->vlen * (*templ)->vlen
67 < ((*templ)->xts1 + (*templ)->xts2 + 1) * ((*templ)->yts1 + (*templ)->yts2 + 1)
68 * AR2_DEFAULT_TRACKING_SD_THRESH * AR2_DEFAULT_TRACKING_SD_THRESH ) {
69 return -1; /* template too uniform to match reliably */
70 }
71 #endif
72
73 // Get the screen coordinates for up to three previous positions of this feature into search[][].
74 if( surfaceSet->contNum == 1 ) {
75 ar2GetSearchPoint( handle->cparamLT,
76 (const float (*)[4])handle->wtrans1[snum], NULL, NULL,
77 &(surfaceSet->surface[snum].featureSet->list[level].coord[fnum]),
78 search );
79 }
80 else if( surfaceSet->contNum == 2 ) {
81 ar2GetSearchPoint( handle->cparamLT,
82 (const float (*)[4])handle->wtrans1[snum],
83 (const float (*)[4])handle->wtrans2[snum], NULL,
84 &(surfaceSet->surface[snum].featureSet->list[level].coord[fnum]),
85 search );
86 }
87 else {
88 ar2GetSearchPoint( handle->cparamLT,
89 (const float (*)[4])handle->wtrans1[snum],
90 (const float (*)[4])handle->wtrans2[snum],
91 (const float (*)[4])handle->wtrans3[snum],
92 &(surfaceSet->surface[snum].featureSet->list[level].coord[fnum]),
93 search );
94 }
95
96 #if AR2_CAPABLE_ADAPTIVE_TEMPLATE
97 if( handle->blurMethod == AR2_CONSTANT_BLUR ) {
98 if( ar2GetBestMatching( dataPtr,
99 mfImage,
100 handle->xsize,
101 handle->ysize,
102 handle->pixFormat,
103 *templ,
104 handle->searchSize,
105 handle->searchSize,
106 search,
107 &bx, &by,
108 &(result->sim)) < 0 ) {
109 return -1;
110 }
111 result->blurLevel = handle->blurLevel;
112 }
113 else {
114 if( ar2GetBestMatching2( dataPtr,
115 mfImage,
116 handle->xsize,
117 handle->ysize,
118 handle->pixFormat,
119 *templ2,
120 handle->searchSize,
121 handle->searchSize,
122 search,
123 &bx, &by,
124 &(result->sim),
125 &(result->blurLevel)) < 0 ) {
126 return -1;
127 }
128 }
129 #else
130 if( ar2GetBestMatching( dataPtr,
131 mfImage,
132 handle->xsize,
133 handle->ysize,
134 handle->pixFormat,
135 *templ,
136 handle->searchSize,
137 handle->searchSize,
138 search,
139 &bx, &by,
140 &(result->sim)) < 0 ) {
141 return -1;
142 }
143 #endif
144
145 result->pos2d[0] = (float)bx; /* best-match screen position */
146 result->pos2d[1] = (float)by;
147 result->pos3d[0] = surfaceSet->surface[snum].trans[0][0] * surfaceSet->surface[snum].featureSet->list[level].coord[fnum].mx
148 + surfaceSet->surface[snum].trans[0][1] * surfaceSet->surface[snum].featureSet->list[level].coord[fnum].my
149 + surfaceSet->surface[snum].trans[0][3];
150 result->pos3d[1] = surfaceSet->surface[snum].trans[1][0] * surfaceSet->surface[snum].featureSet->list[level].coord[fnum].mx
151 + surfaceSet->surface[snum].trans[1][1] * surfaceSet->surface[snum].featureSet->list[level].coord[fnum].my
152 + surfaceSet->surface[snum].trans[1][3];
153 result->pos3d[2] = surfaceSet->surface[snum].trans[2][0] * surfaceSet->surface[snum].featureSet->list[level].coord[fnum].mx
154 + surfaceSet->surface[snum].trans[2][1] * surfaceSet->surface[snum].featureSet->list[level].coord[fnum].my
155 + surfaceSet->surface[snum].trans[2][3];
156
157 return 0;
158 }
tracking提取keypoint没看出来在哪里处理?