[Artoolkit] Framework Analysis of nftSimple

What is nftSimple?

Loads NFT dataset names from a configuration file.

The example uses the “Pinball.jpg” image supplied in the “Misc/patterns” folder. ARToolKit NFT requires a fast device, preferably dual-core for good operation, e.g. Samsung Galaxy SII or similar. Build/deployment for Android API 9 (Android OS v2.3) or later is recommended.

 


nftSimple - NFT Example

For developers who are already familiar with the code of simpleLite, it will be useful to do a side-by-side comparison of the code of nftSimple.

The basic flow of program operations (grab a frame, track markers, render) is very similar,

However, there are significant changes in how the marker information is handled, as well as in the control flow of the tracking loop.

Choose a resolution no greater than 800×600. A resolution of 640×480 is perfectly acceptable for NFT, and the greater frame rate achievable by using this resolution rather than a higher one is of more advantage than a larger frame size.

 

 

1. simpleLite -- Linux version

/*!
    @typedef ARHandle
    @abstract   Holds the state of one ARToolKit square-marker tracking instance.
    @discussion Created by arCreateHandle() and passed to arDetectMarker() once per
        video frame. Most fields are queried/set via the arGet*()/arSet*() accessor
        functions, while per-frame results (marker_num, markerInfo) are commonly
        read directly by application code.
    @field        arDebug Debug mode flag: AR_DEBUG_ENABLE or AR_DEBUG_DISABLE (see arSetDebugMode()).
    @field        arPixelFormat Pixel format of the incoming video frames (see arSetPixelFormat()).
    @field        arPixelSize Size of one pixel in bytes for arPixelFormat. (NOTE(review): inferred from the name — confirm.)
    @field        arLabelingMode Labeling mode passed through to the labeling (binarization) step.
    @field        arLabelingThresh Current binarization threshold; callers clamp it to [0, 255].
    @field        arImageProcMode
        To query this value, call arGetImageProcMode(). To set this value, call arSetImageProcMode().
    @field        arPatternDetectionMode Pattern detection mode (template matching and/or matrix code); see arGetPatternDetectionMode().
    @field        arMarkerExtractionMode Marker extraction mode. (NOTE(review): semantics not visible here — confirm against ar.h.)
    @field        arParamLT Camera parameter lookup table used during marker extraction.
    @field        xsize Width in pixels of the video frames being processed.
    @field        ysize Height in pixels of the video frames being processed.
    @field        marker_num Number of valid entries in markerInfo after the last arDetectMarker() call.
    @field        markerInfo Identified-marker results (id, confidence cf, etc.), up to AR_SQUARE_MAX entries.
    @field        marker2_num Number of valid entries in markerInfo2.
    @field        markerInfo2 Intermediate (pre-identification) detection results produced by arDetectMarker2().
    @field        history_num Number of valid entries in history.
    @field        history Per-marker tracking history. (NOTE(review): exact semantics not visible here — confirm.)
    @field        labelInfo Output of the labeling (connected-components) step.
    @field        pattHandle Pattern templates attached via arPattAttach().
    @field        arLabelingThreshMode Threshold mode: MANUAL, AUTO_MEDIAN, AUTO_OTSU, AUTO_ADAPTIVE, or AUTO_BRACKETING.
    @field        arLabelingThreshAutoInterval Number of frames between automatic threshold recalculations.
    @field        arLabelingThreshAutoIntervalTTL Countdown (in frames) until the next automatic threshold recalculation.
    @field        arLabelingThreshAutoBracketOver Current upward bracket offset used by AUTO_BRACKETING mode.
    @field        arLabelingThreshAutoBracketUnder Current downward bracket offset used by AUTO_BRACKETING mode.
    @field        arImageProcInfo Scratch state for image-processing helpers (histograms, adaptive thresholding).
    @field      pattRatio A value between 0.0 and 1.0, representing the proportion of the marker width which constitutes the pattern. In earlier versions, this value was fixed at 0.5.
    @field      matrixCodeType When matrix code pattern detection mode is active, indicates the type of matrix code to detect.
 */

typedef struct {
    int                arDebug;
    AR_PIXEL_FORMAT    arPixelFormat;
    int                arPixelSize;
    int                arLabelingMode;
    int                arLabelingThresh;
    int                arImageProcMode;
    int                arPatternDetectionMode;
    int                arMarkerExtractionMode;
    ARParamLT         *arParamLT;
    int                xsize;
    int                ysize;
    int                marker_num;
    ARMarkerInfo       markerInfo[AR_SQUARE_MAX];
    int                marker2_num;
    ARMarkerInfo2      markerInfo2[AR_SQUARE_MAX];
    int                history_num;
    ARTrackingHistory  history[AR_SQUARE_MAX];
    ARLabelInfo        labelInfo;
    ARPattHandle      *pattHandle;
    AR_LABELING_THRESH_MODE arLabelingThreshMode;
    int                arLabelingThreshAutoInterval;
    int                arLabelingThreshAutoIntervalTTL;
    int                arLabelingThreshAutoBracketOver;
    int                arLabelingThreshAutoBracketUnder;
    ARImageProcInfo   *arImageProcInfo;
    ARdouble           pattRatio;
    AR_MATRIX_CODE_TYPE matrixCodeType;
} ARHandle;
// ARHandle (end of struct excerpt)
/*
 *  simpleLite.c
 *
 *  Copyright 2015 Daqri LLC. All Rights Reserved.
 *  Copyright 2002-2015 ARToolworks, Inc. All Rights Reserved.
 *
 *  Author(s): Philip Lamb.
 *
 */

// ============================================================================
//    Includes
// ============================================================================

#include <stdio.h>
#include <string.h>
#ifdef _WIN32
#  define snprintf _snprintf
#endif
#include <stdlib.h>                    // malloc(), free()
#ifdef __APPLE__
#  include <GLUT/glut.h>
#else
#  include <GL/glut.h>
#endif
#include <AR/config.h>
#include <AR/video.h>
#include <AR/param.h>            // arParamDisp()
#include <AR/ar.h>
#include <AR/gsub_lite.h>

// ============================================================================
//    Constants
// ============================================================================

#define VIEW_SCALEFACTOR        1.0         // Units received from ARToolKit tracking will be multiplied by this factor before being used in OpenGL drawing.
#define VIEW_DISTANCE_MIN       40.0        // Objects closer to the camera than this will not be displayed. OpenGL units.
#define VIEW_DISTANCE_MAX       10000.0     // Objects further away from the camera than this will not be displayed. OpenGL units.

// ============================================================================
//    Global variables
// ============================================================================

// Preferences.
static int windowed = TRUE;                     // Use windowed (TRUE) or fullscreen mode (FALSE) on launch.
static int windowWidth  = 640;                  // Initial window width, also updated during program execution.
static int windowHeight = 480;                  // Initial window height, also updated during program execution.
static int windowDepth  = 32;                   // Fullscreen mode bit depth.
static int windowRefresh= 0;                    // Fullscreen mode refresh rate. Set to 0 to use default rate.

// Image acquisition.
static ARUint8        *gARTImage = NULL;        // Most recent video frame; set in mainLoop(), consumed (and reset to NULL) by Display().
static int             gARTImageSavePlease = FALSE; // Set by the 's' key; mainLoop() saves the next frame as JPEG and clears it.

// Marker detection.
static ARHandle        *gARHandle = NULL;       // ARToolKit tracking state, created in setupCamera().
static ARPattHandle    *gARPattHandle = NULL;   // Loaded pattern templates, created in setupMarker().
static long             gCallCountMarkerDetect = 0; // Frames processed since the last FPS counter reset ('c' key).
 
// Transformation matrix retrieval.
static AR3DHandle     *gAR3DHandle = NULL;      // Pose-estimation state, created in setupCamera().
static ARdouble        gPatt_width     = 80.0;    // Per-marker, but we are using only 1 marker.
static ARdouble        gPatt_trans[3][4];         // Per-marker, but we are using only 1 marker.
static int             gPatt_found = FALSE;       // Per-marker, but we are using only 1 marker.
static int             gPatt_id;                  // Per-marker, but we are using only 1 marker.

// Drawing.
static ARParamLT                   *gCparamLT = NULL;    // Camera parameter lookup table (also used for the projection frustum).
static ARGL_CONTEXT_SETTINGS_REF    gArglSettings = NULL; // gsub_lite context used to draw the video background.
static int gShowHelp = 1;                     // Non-zero: draw the help-key overlay ('?' toggles).
static int gShowMode = 1;                     // Non-zero: draw the mode-info overlay ('m' toggles).
static int gDrawRotate = FALSE;               // Whether the cube is spinning ([space] toggles).
static float gDrawRotateAngle = 0;            // For use in drawing.


// ============================================================================
//    Function prototypes.
// ============================================================================

static void print(const char *text, const float x, const float y, int calculateXFromRightEdge, int calculateYFromTopEdge);
static void drawBackground(const float width, const float height, const float x, const float y);
static void printHelpKeys();
static void printMode();

// ============================================================================
//    Functions
// ============================================================================

// Something to look at, draw a rotating colour cube.
static void DrawCube(void)
{
    // Draws a rotating colour cube, 40 units on a side, with its base resting on
    // the marker plane (z = 0) and rotated about +z by gDrawRotateAngle.
    // Uses fixed-function OpenGL vertex arrays; depends on the modelview matrix
    // already holding the camera-relative marker pose (set up in Display()).
    int i;
    float fSize = 40.0f;   // Cube edge length in ARToolKit/OpenGL units.
    // Unit cube centred on the origin; scaled by fSize below.
    const GLfloat cube_vertices [8][3] = {
        /* +z */ {0.5f, 0.5f, 0.5f}, {0.5f, -0.5f, 0.5f}, {-0.5f, -0.5f, 0.5f}, {-0.5f, 0.5f, 0.5f},
        /* -z */ {0.5f, 0.5f, -0.5f}, {0.5f, -0.5f, -0.5f}, {-0.5f, -0.5f, -0.5f}, {-0.5f, 0.5f, -0.5f} };
    const GLubyte cube_vertex_colors [8][4] = {
        {255, 255, 255, 255}, {255, 255, 0, 255}, {0, 255, 0, 255}, {0, 255, 255, 255},
        {255, 0, 255, 255}, {255, 0, 0, 255}, {0, 0, 0, 255}, {0, 0, 255, 255} };
    const GLubyte cube_faces [6][4] = { /* ccw-winding */
        /* +z */ {3, 2, 1, 0}, /* -y */ {2, 3, 7, 6}, /* +y */ {0, 1, 5, 4},
        /* -x */ {3, 0, 4, 7}, /* +x */ {1, 2, 6, 5}, /* -z */ {4, 5, 6, 7} };
    
    glPushMatrix(); // Save world coordinate system.
    glRotatef(gDrawRotateAngle, 0.0f, 0.0f, 1.0f); // Rotate about z axis.
    glScalef(fSize, fSize, fSize);
    glTranslatef(0.0f, 0.0f, 0.5f); // Place base of cube on marker surface.
    glDisable(GL_LIGHTING);
    glDisable(GL_TEXTURE_2D);
    glDisable(GL_BLEND);
    glColorPointer(4, GL_UNSIGNED_BYTE, 0, cube_vertex_colors);
    glVertexPointer(3, GL_FLOAT, 0, cube_vertices);
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_COLOR_ARRAY);
    // Filled faces, per-vertex coloured.
    for (i = 0; i < 6; i++) {
        glDrawElements(GL_TRIANGLE_FAN, 4, GL_UNSIGNED_BYTE, &(cube_faces[i][0]));
    }
    glDisableClientState(GL_COLOR_ARRAY);
    glColor4ub(0, 0, 0, 255);
    // Black wireframe outline over the same faces.
    for (i = 0; i < 6; i++) {
        glDrawElements(GL_LINE_LOOP, 4, GL_UNSIGNED_BYTE, &(cube_faces[i][0]));
    }
    glPopMatrix();    // Restore world coordinate system.
}

// Advance the cube's rotation animation by timeDelta seconds (45 deg/sec),
// wrapping the angle back into [0, 360). No-op while spinning is disabled.
static void DrawCubeUpdate(float timeDelta)
{
    if (!gDrawRotate) return;

    gDrawRotateAngle += 45.0f * timeDelta;
    if (gDrawRotateAngle > 360.0f) gDrawRotateAngle -= 360.0f;
}

// Opens the video device described by vconf, loads and size-corrects the camera
// parameters from cparam_name, and creates the tracking handles.
//
// Outputs (set only on success): *cparamLT_p (camera parameter lookup table),
// *arhandle (marker-detection state), *ar3dhandle (pose-estimation state).
// Returns TRUE on success, FALSE on failure.
//
// On failure, everything created so far is released (the original version
// leaked the open video connection and partially-created handles on the
// later error paths).
static int setupCamera(const char *cparam_name, char *vconf, ARParamLT **cparamLT_p, ARHandle **arhandle, AR3DHandle **ar3dhandle)
{    
    ARParam            cparam;
    int                xsize, ysize;
    AR_PIXEL_FORMAT pixFormat;

    // Open the video path.
    if (arVideoOpen(vconf) < 0) {
        ARLOGe("setupCamera(): Unable to open connection to camera.\n");
        return (FALSE);
    }
    
    // Find the size of the window.
    if (arVideoGetSize(&xsize, &ysize) < 0) {
        ARLOGe("setupCamera(): Unable to determine camera frame size.\n");
        goto bail_video;
    }
    ARLOGi("Camera image size (x,y) = (%d,%d)\n", xsize, ysize);
    
    // Get the format in which the camera is returning pixels.
    pixFormat = arVideoGetPixelFormat();
    if (pixFormat == AR_PIXEL_FORMAT_INVALID) {
        ARLOGe("setupCamera(): Camera is using unsupported pixel format.\n");
        goto bail_video;
    }
    
    // Load the camera parameters, resize for the window and init.
    if (arParamLoad(cparam_name, 1, &cparam) < 0) {
        ARLOGe("setupCamera(): Error loading parameter file %s for camera.\n", cparam_name);
        goto bail_video;
    }
    if (cparam.xsize != xsize || cparam.ysize != ysize) {
        ARLOGw("*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
        arParamChangeSize(&cparam, xsize, ysize, &cparam);
    }
#ifdef DEBUG
    ARLOG("*** Camera Parameter ***\n");
    arParamDisp(&cparam);
#endif
    if ((*cparamLT_p = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET)) == NULL) {
        ARLOGe("setupCamera(): Error: arParamLTCreate.\n");
        goto bail_video;
    }

    if ((*arhandle = arCreateHandle(*cparamLT_p)) == NULL) {
        ARLOGe("setupCamera(): Error: arCreateHandle.\n");
        goto bail_param;
    }
    if (arSetPixelFormat(*arhandle, pixFormat) < 0) {
        ARLOGe("setupCamera(): Error: arSetPixelFormat.\n");
        goto bail_handle;
    }
    if (arSetDebugMode(*arhandle, AR_DEBUG_DISABLE) < 0) {
        ARLOGe("setupCamera(): Error: arSetDebugMode.\n");
        goto bail_handle;
    }
    if ((*ar3dhandle = ar3DCreateHandle(&cparam)) == NULL) {
        ARLOGe("setupCamera(): Error: ar3DCreateHandle.\n");
        goto bail_handle;
    }
    
    if (arVideoCapStart() != 0) {
        ARLOGe("setupCamera(): Unable to begin camera data capture.\n");
        ar3DDeleteHandle(ar3dhandle);
        goto bail_handle;
    }
    
    return (TRUE);

    // Unwind partially-completed setup in reverse order of creation, and reset
    // the output pointers so callers never see stale handles.
bail_handle:
    arDeleteHandle(*arhandle);
    *arhandle = NULL;
bail_param:
    arParamLTFree(cparamLT_p);
bail_video:
    arVideoClose();
    return (FALSE);
}

// Creates a pattern handle, loads the single template pattern file patt_name
// into it, and attaches the handle to arhandle so that subsequent
// arDetectMarker() calls will identify it. Sets *patt_id to the loaded
// pattern's id and *pattHandle_p to the new handle.
// Returns TRUE on success, FALSE on failure.
static int setupMarker(const char *patt_name, int *patt_id, ARHandle *arhandle, ARPattHandle **pattHandle_p)
{    
    if ((*pattHandle_p = arPattCreateHandle()) == NULL) {
        ARLOGe("setupMarker(): Error: arPattCreateHandle.\n");
        return (FALSE);
    }
    
    // Loading only 1 pattern in this example.
    if ((*patt_id = arPattLoad(*pattHandle_p, patt_name)) < 0) {
        ARLOGe("setupMarker(): Error loading pattern file %s.\n", patt_name);
        arPattDeleteHandle(*pattHandle_p);
        // Reset the output so the caller (and cleanup()) never sees a handle
        // that has already been deleted. The original left it dangling, which
        // would lead to a double free via arPattDeleteHandle(gARPattHandle).
        *pattHandle_p = NULL;
        return (FALSE);
    }
    
    arPattAttach(arhandle, *pattHandle_p);
    
    return (TRUE);
}

//
// Releases all resources acquired during setup, in approximately reverse order
// of creation: GL drawing context settings, pattern handles, video capture,
// pose/tracking handles, camera parameters, and finally the video connection.
// Called before normal exit ('q'/[esc]) and on fatal errors after setup.
// NOTE(review): assumes the AR delete/free functions tolerate handles that are
// NULL if setup failed part-way — confirm against the ARToolKit API.
//
static void cleanup(void)
{
    arglCleanup(gArglSettings);
    gArglSettings = NULL;
    arPattDetach(gARHandle);
    arPattDeleteHandle(gARPattHandle);
    arVideoCapStop();
    ar3DDeleteHandle(&gAR3DHandle);
    arDeleteHandle(gARHandle);
    arParamLTFree(&gCparamLT);
    arVideoClose();
}

//
// GLUT keyboard callback. Handles quitting, toggling the cube animation, and
// runtime switching of ARToolKit image-processing, thresholding, and debug
// modes. x and y (cursor position) are unused.
//
static void Keyboard(unsigned char key, int x, int y)
{
    int mode, threshChange = 0;    // threshChange: +/-5 adjustment requested by '-'/'+'.
    AR_LABELING_THRESH_MODE modea;
    
    switch (key) {
        case 0x1B:                        // Quit.
        case 'Q':
        case 'q':
            cleanup();
            exit(0);
            break;
        case ' ':                         // Toggle cube rotation.
            gDrawRotate = !gDrawRotate;
            break;
        case 'X':                         // Toggle full-frame vs. field image processing.
        case 'x':
            arGetImageProcMode(gARHandle, &mode);
            switch (mode) {
                case AR_IMAGE_PROC_FRAME_IMAGE:  mode = AR_IMAGE_PROC_FIELD_IMAGE; break;
                case AR_IMAGE_PROC_FIELD_IMAGE:
                default: mode = AR_IMAGE_PROC_FRAME_IMAGE; break;
            }
            arSetImageProcMode(gARHandle, mode);
            break;
        case 'C':                         // Report and reset the frame-rate counter.
        case 'c':
            ARLOGe("*** Camera - %f (frame/sec)\n", (double)gCallCountMarkerDetect/arUtilTimer());
            gCallCountMarkerDetect = 0;
            arUtilTimerReset();
            break;
        case 'a':                         // Cycle through the labeling threshold modes.
        case 'A':
            arGetLabelingThreshMode(gARHandle, &modea);
            switch (modea) {
                case AR_LABELING_THRESH_MODE_MANUAL:        modea = AR_LABELING_THRESH_MODE_AUTO_MEDIAN; break;
                case AR_LABELING_THRESH_MODE_AUTO_MEDIAN:   modea = AR_LABELING_THRESH_MODE_AUTO_OTSU; break;
                case AR_LABELING_THRESH_MODE_AUTO_OTSU:     modea = AR_LABELING_THRESH_MODE_AUTO_ADAPTIVE; break;
                case AR_LABELING_THRESH_MODE_AUTO_ADAPTIVE: modea = AR_LABELING_THRESH_MODE_AUTO_BRACKETING; break;
                case AR_LABELING_THRESH_MODE_AUTO_BRACKETING:
                default: modea = AR_LABELING_THRESH_MODE_MANUAL; break;
            }
            arSetLabelingThreshMode(gARHandle, modea);
            break;
        case '-':                         // Lower the manual threshold by 5 (applied below).
            threshChange = -5;
            break;
        case '+':                         // Raise the manual threshold by 5 (applied below).
        case '=':
            threshChange = +5;
            break;
        case 'D':                         // Toggle debug (labeled-image) mode.
        case 'd':
            arGetDebugMode(gARHandle, &mode);
            arSetDebugMode(gARHandle, !mode);
            break;
        case 's':                         // Request a one-shot JPEG save of the next frame.
        case 'S':
            if (!gARTImageSavePlease) gARTImageSavePlease = TRUE;
            break;
        case '?':                         // Toggle the help overlay.
        case '/':
            gShowHelp++;
            if (gShowHelp > 1) gShowHelp = 0;
            break;
        case 'm':                         // Toggle the mode-info overlay.
        case 'M':
            gShowMode = !gShowMode;
            break;
        default:
            break;
    }
    // Apply any requested threshold change, clamped to the valid [0, 255] range.
    if (threshChange) {
        int threshhold;
        arGetLabelingThresh(gARHandle, &threshhold);
        threshhold += threshChange;
        if (threshhold < 0) threshhold = 0;
        if (threshhold > 255) threshhold = 255;
        arSetLabelingThresh(gARHandle, threshhold);
    }
    
}

//
// GLUT idle callback: grabs a video frame, runs marker detection, selects the
// highest-confidence match for our pattern, and requests a redraw. Throttled
// to at most 100 Hz. This only updates state; actual rendering is in Display().
//
static void mainLoop(void)
{
    static int imageNumber = 0;    // Counter for saved-frame filenames.
    static int ms_prev = 0;
    int        ms;
    float      s_elapsed;
    ARUint8   *image;
    ARdouble   err;

    int j, k;
    
    // Find out how long since mainLoop() last ran.
    ms = glutGet(GLUT_ELAPSED_TIME);
    s_elapsed = (float)(ms - ms_prev) * 0.001f;
    if (s_elapsed < 0.01f) return; // Don't update more often than 100 Hz.
    ms_prev = ms;
    
    // Advance the cube's rotation animation (time-based). Rendering of the
    // cube itself happens later, in Display().
    DrawCubeUpdate(s_elapsed);
    
    // Grab a video frame.
    if ((image = arVideoGetImage()) != NULL) {
        gARTImage = image;    // Save the fetched image.
        
        if (gARTImageSavePlease) {
            // Buffer sized generously and written with snprintf: the original
            // sprintf into char[15] overflowed once imageNumber exceeded 9999.
            char imageNumberText[32];
            snprintf(imageNumberText, sizeof(imageNumberText), "image-%04d.jpg", imageNumber++);
            if (arVideoSaveImageJPEG(gARHandle->xsize, gARHandle->ysize, gARHandle->arPixelFormat, gARTImage, imageNumberText, 75, 0) < 0) {
                ARLOGe("Error saving video image.\n");
            }
            gARTImageSavePlease = FALSE;
        }
        
        gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.
        
        // Detect the markers in the video frame.
        if (arDetectMarker(gARHandle, gARTImage) < 0) {
            exit(-1);
        }
        
        // Check through the marker_info array for highest confidence
        // visible marker matching our preferred pattern.
        k = -1;
        for (j = 0; j < gARHandle->marker_num; j++) {
            if (gARHandle->markerInfo[j].id == gPatt_id) {
                if (k == -1) k = j; // First marker detected.
                else if (gARHandle->markerInfo[j].cf > gARHandle->markerInfo[k].cf) k = j; // Higher confidence marker detected.
            }
        }
        
        if (k != -1) {
            // Get the transformation between the marker and the real camera into gPatt_trans.
            // NOTE(review): err (the pose-fit error) is computed but currently unused.
            err = arGetTransMatSquare(gAR3DHandle, &(gARHandle->markerInfo[k]), gPatt_width, gPatt_trans);
            (void)err;
            gPatt_found = TRUE;
        } else {
            gPatt_found = FALSE;
        }
        
        // Tell GLUT the display has changed.
        glutPostRedisplay();
    }
}

//
// GLUT visibility callback (also fired when the window first becomes visible).
// Registers mainLoop() as the idle function only while the window is visible,
// so no work is done while the window is hidden.
//
static void Visibility(int visible)
{
    glutIdleFunc(visible == GLUT_VISIBLE ? mainLoop : NULL);
}

//
// GLUT reshape callback: records the new window dimensions (used for the 2D
// overlay layout) and remaps the GL viewport onto the whole window.
//
static void Reshape(int w, int h)
{
    windowWidth  = w;
    windowHeight = h;

    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glViewport(0, 0, (GLsizei)w, (GLsizei)h);

    // Call through to anyone else who needs to know about window sizing here.
}

//
// This function is called when the window needs redrawing.
// Draws the latest captured video frame as the background, then (if mainLoop()
// found the marker) the cube registered to the marker pose, and finally the
// 2D text overlays (mode info and help keys).
//
static void Display(void)
{
    ARdouble p[16];    // Projection matrix, column-major (OpenGL layout).
    ARdouble m[16];    // Modelview matrix holding the camera-relative marker pose.
    
    // Select correct buffer for this context.
    glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
    
    // Upload and draw the camera image as the scene background.
    arglPixelBufferDataUpload(gArglSettings, gARTImage);
    arglDispImage(gArglSettings);
    gARTImage = NULL; // Invalidate image data.
                
    // Projection transformation.
    arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
    glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
    glLoadMatrixf(p);
#else
    glLoadMatrixd(p);
#endif
    glMatrixMode(GL_MODELVIEW);
        
    glEnable(GL_DEPTH_TEST);

    // Viewing transformation.
    glLoadIdentity();
    // Lighting and geometry that moves with the camera should go here.
    // (I.e. must be specified before viewing transformations.)
    //none
    
    if (gPatt_found) {
    
        // Calculate the camera position relative to the marker.
        // Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
        arglCameraViewRH((const ARdouble (*)[4])gPatt_trans, m, VIEW_SCALEFACTOR);
#ifdef ARDOUBLE_IS_FLOAT
        glLoadMatrixf(m);
#else
        glLoadMatrixd(m);
#endif

        // All lighting and geometry to be drawn relative to the marker goes here.
        DrawCube();
    
    } // gPatt_found
    
    // Any 2D overlays go here.
    // Switch to an orthographic projection in window pixel coordinates so text
    // can be positioned directly by print()/drawBackground().
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    //
    // Draw help text and mode.
    //
    if (gShowMode) {
        printMode();
    }
    if (gShowHelp) {
        if (gShowHelp == 1) {
            printHelpKeys();
        }
    }
    
    glutSwapBuffers();
}

//
// Program entry point: initializes GLUT, opens the camera and loads its
// parameters, creates the OpenGL window (or fullscreen game mode), sets up
// gsub_lite for background drawing, loads the marker pattern, registers the
// GLUT callbacks, and hands control to glutMainLoop() (which never returns).
//
int main(int argc, char** argv)
{
    char glutGamemode[32];
    char cparam_name[] = "Data/camera_para.dat";
    char vconf[] = "";                        // Empty string: use the default video configuration.
    char patt_name[]  = "Data/hiro.patt";
    
    //
    // Library inits.
    //

    glutInit(&argc, argv);

    //
    // Video setup.
    //

    if (!setupCamera(cparam_name, vconf, &gCparamLT, &gARHandle, &gAR3DHandle)) {
        ARLOGe("main(): Unable to set up AR camera.\n");
        exit(-1);
    }

    //
    // Graphics setup.
    //

    // Set up GL context(s) for OpenGL to draw into.
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH);
    if (!windowed) {
        // Use snprintf (not sprintf) so an oversized width/height/refresh value
        // cannot overflow the fixed-size game-mode string.
        if (windowRefresh) snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i:%i@%i", windowWidth, windowHeight, windowDepth, windowRefresh);
        else snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i:%i", windowWidth, windowHeight, windowDepth);
        glutGameModeString(glutGamemode);
        glutEnterGameMode();
    } else {
        glutInitWindowSize(windowWidth, windowHeight);
        glutCreateWindow(argv[0]);
    }

    // Setup ARgsub_lite library for current OpenGL context.
    if ((gArglSettings = arglSetupForCurrentContext(&(gCparamLT->param), arVideoGetPixelFormat())) == NULL) {
        ARLOGe("main(): arglSetupForCurrentContext() returned error.\n");
        cleanup();
        exit(-1);
    }
    arglSetupDebugMode(gArglSettings, gARHandle);
    arUtilTimerReset();
        
    // Load marker(s).
    if (!setupMarker(patt_name, &gPatt_id, gARHandle, &gARPattHandle)) {
        ARLOGe("main(): Unable to set up AR marker.\n");
        cleanup();
        exit(-1);
    }
    
    // Register GLUT event-handling callbacks.
    // NB: mainLoop() is registered by Visibility.
    glutDisplayFunc(Display);
    glutReshapeFunc(Reshape);
    glutVisibilityFunc(Visibility);
    glutKeyboardFunc(Keyboard);
    
    glutMainLoop();

    return (0);
}

//
// The following functions provide the onscreen help text and mode info.
//

// Renders text at window position (x, y) using the GLUT 10-pt Helvetica bitmap
// font. When calculateXFromRightEdge is non-zero, x is measured from the right
// edge (the string width is subtracted); when calculateYFromTopEdge is
// non-zero, y is measured down from the top edge (minus one line height).
// A NULL text pointer is ignored.
static void print(const char *text, const float x, const float y, int calculateXFromRightEdge, int calculateYFromTopEdge)
{
    GLfloat rasterX, rasterY;
    const char *c;

    if (!text) return;

    if (calculateXFromRightEdge) {
        rasterX = windowWidth - x - (float)glutBitmapLength(GLUT_BITMAP_HELVETICA_10, (const unsigned char *)text);
    } else {
        rasterX = x;
    }
    rasterY = calculateYFromTopEdge ? (windowHeight - y - 10.0f) : y;
    glRasterPos2f(rasterX, rasterY);

    for (c = text; *c != '\0'; c++) {
        glutBitmapCharacter(GLUT_BITMAP_HELVETICA_10, *c);
    }
}

static void drawBackground(const float width, const float height, const float x, const float y)
{
    GLfloat vertices[4][2];
    
    vertices[0][0] = x; vertices[0][1] = y;
    vertices[1][0] = width + x; vertices[1][1] = y;
    vertices[2][0] = width + x; vertices[2][1] = height + y;
    vertices[3][0] = x; vertices[3][1] = height + y;
    glLoadIdentity();
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    glEnable(GL_BLEND);
    glVertexPointer(2, GL_FLOAT, 0, vertices);
    glEnableClientState(GL_VERTEX_ARRAY);
    glColor4f(0.0f, 0.0f, 0.0f, 0.5f);    // 50% transparent black.
    glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
    glColor4f(1.0f, 1.0f, 1.0f, 1.0f); // Opaque white.
    //glLineWidth(1.0f);
    //glDrawArrays(GL_LINE_LOOP, 0, 4);
    glDisableClientState(GL_VERTEX_ARRAY);
    glDisable(GL_BLEND);
}

//
// Draws the hotkey help overlay: a semi-transparent background sized to fit
// the text, with one line per key binding, anchored near the bottom-left
// corner of the window. Fixes user-facing typos in the original help strings
// ("threshhold" -> "threshold", "Calulcate" -> "Calculate").
//
static void printHelpKeys()
{
    int i;
    GLfloat  w, bw, bh;
    const char *helpText[] = {
        "Keys:\n",
        " ? or /        Show/hide this help.",
        " q or [esc]    Quit program.",
        " d             Activate / deactivate debug mode.",
        " m             Toggle display of mode info.",
        " a             Toggle between available threshold modes.",
        " - and +       Switch to manual threshold mode, and adjust threshold up/down by 5.",
        " x             Change image processing mode.",
        " c             Calculate frame rate.",
    };
#define helpTextLineCount (sizeof(helpText)/sizeof(char *))
    
    // Size the background to the widest line and the total line count.
    bw = 0.0f;
    for (i = 0; i < helpTextLineCount; i++) {
        w = (float)glutBitmapLength(GLUT_BITMAP_HELVETICA_10, (unsigned char *)helpText[i]);
        if (w > bw) bw = w;
    }
    bh = helpTextLineCount * 10.0f /* character height */ + (helpTextLineCount - 1) * 2.0f /* line spacing */;
    drawBackground(bw, bh, 2.0f, 2.0f);
    
    // Print from the top line down (first entry gets the highest y).
    for (i = 0; i < helpTextLineCount; i++) print(helpText[i], 2.0f, (helpTextLineCount - 1 - i)*12.0f + 2.0f, 0, 0);
}

//
// Draws the current video/tracker settings as text lines anchored to the
// top-left corner of the window: frame size and image-processing mode,
// threshold mode (plus the numeric threshold where applicable), border size
// and pattern-detection mode, and the current window size. Assumes the 2D
// orthographic projection set up by Display() is current.
//
static void printMode()
{
    int len, thresh, line, mode, xsize, ysize;
    AR_LABELING_THRESH_MODE threshMode;
    ARdouble tempF;
    char text[256], *text_p;

    glColor3ub(255, 255, 255);
    line = 1;    // 1-based output line number, counted down from the top edge.
    
    // Image size and processing mode.
    arVideoGetSize(&xsize, &ysize);
    arGetImageProcMode(gARHandle, &mode);
    if (mode == AR_IMAGE_PROC_FRAME_IMAGE) text_p = "full frame";
    else text_p = "even field only";
    snprintf(text, sizeof(text), "Processing %dx%d video frames %s", xsize, ysize, text_p);
    print(text, 2.0f,  (line - 1)*12.0f + 2.0f, 0, 1);
    line++;
    
    // Threshold mode, and threshold, if applicable.
    arGetLabelingThreshMode(gARHandle, &threshMode);
    switch (threshMode) {
        case AR_LABELING_THRESH_MODE_MANUAL: text_p = "MANUAL"; break;
        case AR_LABELING_THRESH_MODE_AUTO_MEDIAN: text_p = "AUTO_MEDIAN"; break;
        case AR_LABELING_THRESH_MODE_AUTO_OTSU: text_p = "AUTO_OTSU"; break;
        case AR_LABELING_THRESH_MODE_AUTO_ADAPTIVE: text_p = "AUTO_ADAPTIVE"; break;
        case AR_LABELING_THRESH_MODE_AUTO_BRACKETING: text_p = "AUTO_BRACKETING"; break;
        default: text_p = "UNKNOWN"; break;
    }
    snprintf(text, sizeof(text), "Threshold mode: %s", text_p);
    // Adaptive mode has no single global threshold value to report.
    if (threshMode != AR_LABELING_THRESH_MODE_AUTO_ADAPTIVE) {
        arGetLabelingThresh(gARHandle, &thresh);
        len = (int)strlen(text);
        snprintf(text + len, sizeof(text) - len, ", thresh=%d", thresh);
    }
    print(text, 2.0f,  (line - 1)*12.0f + 2.0f, 0, 1);
    line++;
    
    // Border size, image processing mode, pattern detection mode.
    // The pattern-detection-mode text is appended to the border text before
    // the combined line is printed.
    arGetBorderSize(gARHandle, &tempF);
    snprintf(text, sizeof(text), "Border: %0.1f%%", tempF*100.0);
    arGetPatternDetectionMode(gARHandle, &mode);
    switch (mode) {
        case AR_TEMPLATE_MATCHING_COLOR: text_p = "Colour template (pattern)"; break;
        case AR_TEMPLATE_MATCHING_MONO: text_p = "Mono template (pattern)"; break;
        case AR_MATRIX_CODE_DETECTION: text_p = "Matrix (barcode)"; break;
        case AR_TEMPLATE_MATCHING_COLOR_AND_MATRIX: text_p = "Colour template + Matrix (2 pass, pattern + barcode)"; break;
        case AR_TEMPLATE_MATCHING_MONO_AND_MATRIX: text_p = "Mono template + Matrix (2 pass, pattern + barcode "; break;
        default: text_p = "UNKNOWN"; break;
    }
    len = (int)strlen(text);
    snprintf(text + len, sizeof(text) - len, ", Pattern detection mode: %s", text_p);
    print(text, 2.0f,  (line - 1)*12.0f + 2.0f, 0, 1);
    line++;
    
    // Window size.
    snprintf(text, sizeof(text), "Drawing into %dx%d window", windowWidth, windowHeight);
    print(text, 2.0f,  (line - 1)*12.0f + 2.0f, 0, 1);
    line++;
    
}

 

Next, we turn to arDetectMarker.c — the heart of the detection pipeline.

就俩函数:

  • int arDetectMarker( ARHandle *arHandle, ARUint8 *dataPtr );
  • static void confidenceCutoff(ARHandle *arHandle);

Let's go.

int arDetectMarker( ARHandle *arHandle, ARUint8 *dataPtr )
{
    ARdouble    rarea, rlen, rlenmin;
    ARdouble    diff, diffmin;
    int         cid, cdir;
    int         i, j, k;
    int         detectionIsDone = 0;
    int         threshDiff;

#if DEBUG_PATT_GETID
cnt = 0;
#endif

    arHandle->marker_num = 0;
    
    if (arHandle->arLabelingThreshMode == AR_LABELING_THRESH_MODE_AUTO_BRACKETING) {
        if (arHandle->arLabelingThreshAutoIntervalTTL > 0) {
            arHandle->arLabelingThreshAutoIntervalTTL--;
        } else {
            int thresholds[3];
            int marker_nums[3];
            
            thresholds[0] = arHandle->arLabelingThresh + arHandle->arLabelingThreshAutoBracketOver;
            if (thresholds[0] > 255) thresholds[0] = 255;
            thresholds[1] = arHandle->arLabelingThresh - arHandle->arLabelingThreshAutoBracketUnder;
            if (thresholds[1] < 0) thresholds[1] = 0;
            thresholds[2] = arHandle->arLabelingThresh;
            
            for (i = 0; i < 3; i++) {
                if (arLabeling(dataPtr, arHandle->xsize, arHandle->ysize, arHandle->arPixelFormat, arHandle->arDebug, arHandle->arLabelingMode, thresholds[i], arHandle->arImageProcMode, &(arHandle->labelInfo), NULL) < 0) return -1;
                if (arDetectMarker2(arHandle->xsize, arHandle->ysize, &(arHandle->labelInfo), arHandle->arImageProcMode, AR_AREA_MAX, AR_AREA_MIN, AR_SQUARE_FIT_THRESH, arHandle->markerInfo2, &(arHandle->marker2_num)) < 0) return -1;
                if (arGetMarkerInfo(dataPtr, arHandle->xsize, arHandle->ysize, arHandle->arPixelFormat, arHandle->markerInfo2, arHandle->marker2_num, arHandle->pattHandle, arHandle->arImageProcMode, arHandle->arPatternDetectionMode, &(arHandle->arParamLT->paramLTf), arHandle->pattRatio, arHandle->markerInfo, &(arHandle->marker_num), arHandle->matrixCodeType) < 0) return -1;
                marker_nums[i] = arHandle->marker_num;
            }

            if (arHandle->arDebug == AR_DEBUG_ENABLE) ARLOGe("Auto threshold (bracket) marker counts -[%3d: %3d] [%3d: %3d] [%3d: %3d]+.\n", thresholds[1], marker_nums[1], thresholds[2], marker_nums[2], thresholds[0], marker_nums[0]);
        
            // If neither of the bracketed values was superior, then change the size of the bracket.
            if (marker_nums[0] <= marker_nums[2] && marker_nums[1] <= marker_nums[2]) {
                if (arHandle->arLabelingThreshAutoBracketOver < arHandle->arLabelingThreshAutoBracketUnder) {
                    arHandle->arLabelingThreshAutoBracketOver++;
                } else if (arHandle->arLabelingThreshAutoBracketOver > arHandle->arLabelingThreshAutoBracketUnder) {
                    arHandle->arLabelingThreshAutoBracketUnder++;
                } else {
                    arHandle->arLabelingThreshAutoBracketOver++;
                    arHandle->arLabelingThreshAutoBracketUnder++;
                }
                if ((thresholds[2] + arHandle->arLabelingThreshAutoBracketOver) >= 255) arHandle->arLabelingThreshAutoBracketOver = 1; // If the bracket has hit the end of the range, reset it.
                if ((thresholds[2] - arHandle->arLabelingThreshAutoBracketOver) <= 0) arHandle->arLabelingThreshAutoBracketUnder = 1; // If a bracket has hit the end of the range, reset it.
                detectionIsDone = 1;
            } else {
                arHandle->arLabelingThresh = (marker_nums[0] >= marker_nums[1] ? thresholds[0] : thresholds[1]);
                threshDiff = arHandle->arLabelingThresh - thresholds[2];
                if (threshDiff > 0) {
                    arHandle->arLabelingThreshAutoBracketOver = threshDiff;
                    arHandle->arLabelingThreshAutoBracketUnder = 1;
                } else {
                    arHandle->arLabelingThreshAutoBracketOver = 1;
                    arHandle->arLabelingThreshAutoBracketUnder = -threshDiff;
                }
                if (arHandle->arDebug == AR_DEBUG_ENABLE) ARLOGe("Auto threshold (bracket) adjusted threshold to %d.\n", arHandle->arLabelingThresh);
            }
            arHandle->arLabelingThreshAutoIntervalTTL = arHandle->arLabelingThreshAutoInterval;
        }
    }
    
    if (!detectionIsDone) {
#if !AR_DISABLE_THRESH_MODE_AUTO_ADAPTIVE
        if (arHandle->arLabelingThreshMode == AR_LABELING_THRESH_MODE_AUTO_ADAPTIVE) {
            
            int ret;
            ret = arImageProcLumaHistAndBoxFilterWithBias(arHandle->arImageProcInfo, dataPtr,  AR_LABELING_THRESH_ADAPTIVE_KERNEL_SIZE_DEFAULT, AR_LABELING_THRESH_ADAPTIVE_BIAS_DEFAULT);
            if (ret < 0) return (ret);
            
            ret = arLabeling(arHandle->arImageProcInfo->image, arHandle->arImageProcInfo->imageX, arHandle->arImageProcInfo->imageY,
                             AR_PIXEL_FORMAT_MONO, arHandle->arDebug, arHandle->arLabelingMode,
                             0, AR_IMAGE_PROC_FRAME_IMAGE,
                             &(arHandle->labelInfo), arHandle->arImageProcInfo->image2);
            if (ret < 0) return (ret);
            
        } else { // !adaptive
#endif
            
            if (arHandle->arLabelingThreshMode == AR_LABELING_THRESH_MODE_AUTO_MEDIAN || arHandle->arLabelingThreshMode == AR_LABELING_THRESH_MODE_AUTO_OTSU) {
                // Do an auto-threshold operation.
                if (arHandle->arLabelingThreshAutoIntervalTTL > 0) {
                    arHandle->arLabelingThreshAutoIntervalTTL--;
                } else {
                    int ret;
                    unsigned char value;
                    if (arHandle->arLabelingThreshMode == AR_LABELING_THRESH_MODE_AUTO_MEDIAN) ret = arImageProcLumaHistAndCDFAndMedian(arHandle->arImageProcInfo, dataPtr, &value);
                    else ret = arImageProcLumaHistAndOtsu(arHandle->arImageProcInfo, dataPtr, &value);
                    if (ret < 0) return (ret);
                    if (arHandle->arDebug == AR_DEBUG_ENABLE && arHandle->arLabelingThresh != value) ARLOGe("Auto threshold (%s) adjusted threshold to %d.\n", (arHandle->arLabelingThreshMode == AR_LABELING_THRESH_MODE_AUTO_MEDIAN ? "median" : "Otsu"), value);
                    arHandle->arLabelingThresh = value;
                    arHandle->arLabelingThreshAutoIntervalTTL = arHandle->arLabelingThreshAutoInterval;
                }
            }
            
            if( arLabeling(dataPtr, arHandle->xsize, arHandle->ysize,
                           arHandle->arPixelFormat, arHandle->arDebug, arHandle->arLabelingMode,
                           arHandle->arLabelingThresh, arHandle->arImageProcMode,
                           &(arHandle->labelInfo), NULL) < 0 ) {
                return -1;
            }
            
#if !AR_DISABLE_THRESH_MODE_AUTO_ADAPTIVE
        }
#endif
        
        if( arDetectMarker2( arHandle->xsize, arHandle->ysize,
                            &(arHandle->labelInfo), arHandle->arImageProcMode,
                            AR_AREA_MAX, AR_AREA_MIN, AR_SQUARE_FIT_THRESH,
                            arHandle->markerInfo2, &(arHandle->marker2_num) ) < 0 ) {
            return -1;
        }
        
        if( arGetMarkerInfo(dataPtr, arHandle->xsize, arHandle->ysize, arHandle->arPixelFormat,
                            arHandle->markerInfo2, arHandle->marker2_num,
                            arHandle->pattHandle, arHandle->arImageProcMode,
                            arHandle->arPatternDetectionMode, &(arHandle->arParamLT->paramLTf), arHandle->pattRatio,
                            arHandle->markerInfo, &(arHandle->marker_num),
                            arHandle->matrixCodeType ) < 0 ) {
            return -1;
        }
    } // !detectionIsDone
    
    // If history mode is not enabled, just perform a basic confidence cutoff.
    if (arHandle->arMarkerExtractionMode == AR_NOUSE_TRACKING_HISTORY) {
        confidenceCutoff(arHandle);
        return 0;
    }

/*------------------------------------------------------------*/

    // For all history records, check every identified marker, to see if the position and size of the marker
    // as recorded in the history record is very similar to one of the identified markers.
    // If it is, and the history record has a higher confidence value, then use the pattern matching
    // information (marker ID, confidence, and direction) info from the history instead.
    for( i = 0; i < arHandle->history_num; i++ ) {
        rlenmin = 0.5;
        cid = -1;
        for( j = 0; j < arHandle->marker_num; j++ ) {
            rarea = (ARdouble)arHandle->history[i].marker.area / (ARdouble)arHandle->markerInfo[j].area;
            if( rarea < 0.7 || rarea > 1.43 ) continue;
            rlen = ( (arHandle->markerInfo[j].pos[0] - arHandle->history[i].marker.pos[0])
                   * (arHandle->markerInfo[j].pos[0] - arHandle->history[i].marker.pos[0])
                   + (arHandle->markerInfo[j].pos[1] - arHandle->history[i].marker.pos[1])
                   * (arHandle->markerInfo[j].pos[1] - arHandle->history[i].marker.pos[1]) )
                   / arHandle->markerInfo[j].area;
            if( rlen < rlenmin ) {
                rlenmin = rlen;
                cid = j;
            }
        }
        if (cid >= 0) {
            if (arHandle->arPatternDetectionMode == AR_TEMPLATE_MATCHING_COLOR || arHandle->arPatternDetectionMode == AR_TEMPLATE_MATCHING_MONO || arHandle->arPatternDetectionMode == AR_MATRIX_CODE_DETECTION) {
                if (arHandle->markerInfo[cid].cf < arHandle->history[i].marker.cf) {
                    arHandle->markerInfo[cid].cf = arHandle->history[i].marker.cf;
                    arHandle->markerInfo[cid].id = arHandle->history[i].marker.id;
                    diffmin = 10000.0 * 10000.0;
                    cdir = -1;
                    for( j = 0; j < 4; j++ ) {
                        diff = 0;
                        for( k = 0; k < 4; k++ ) {
                            diff += (arHandle->history[i].marker.vertex[k][0] - arHandle->markerInfo[cid].vertex[(j+k)%4][0])
                            * (arHandle->history[i].marker.vertex[k][0] - arHandle->markerInfo[cid].vertex[(j+k)%4][0])
                            + (arHandle->history[i].marker.vertex[k][1] - arHandle->markerInfo[cid].vertex[(j+k)%4][1])
                            * (arHandle->history[i].marker.vertex[k][1] - arHandle->markerInfo[cid].vertex[(j+k)%4][1]);
                        }
                        if( diff < diffmin ) {
                            diffmin = diff;
                            cdir = (arHandle->history[i].marker.dir - j + 4) % 4;
                        }
                    }
                    arHandle->markerInfo[cid].dir = cdir;
                    // Copy the id, cf, and dir back to the appropriate mode-dependent values too.
                    if (arHandle->arPatternDetectionMode == AR_TEMPLATE_MATCHING_COLOR || arHandle->arPatternDetectionMode == AR_TEMPLATE_MATCHING_MONO) {
                        arHandle->markerInfo[cid].idPatt  = arHandle->markerInfo[cid].id;
                        arHandle->markerInfo[cid].cfPatt  = arHandle->markerInfo[cid].cf;
                        arHandle->markerInfo[cid].dirPatt = arHandle->markerInfo[cid].dir;
                    } else {
                        arHandle->markerInfo[cid].idMatrix  = arHandle->markerInfo[cid].id;
                        arHandle->markerInfo[cid].cfMatrix  = arHandle->markerInfo[cid].cf;
                        arHandle->markerInfo[cid].dirMatrix = arHandle->markerInfo[cid].dir;
                    }
                }
            } else if (arHandle->arPatternDetectionMode == AR_TEMPLATE_MATCHING_COLOR_AND_MATRIX || arHandle->arPatternDetectionMode == AR_TEMPLATE_MATCHING_MONO_AND_MATRIX) {
                if (arHandle->markerInfo[cid].cfPatt < arHandle->history[i].marker.cfPatt || arHandle->markerInfo[cid].cfMatrix < arHandle->history[i].marker.cfMatrix) {
                    arHandle->markerInfo[cid].cfPatt = arHandle->history[i].marker.cfPatt;
                    arHandle->markerInfo[cid].idPatt = arHandle->history[i].marker.idPatt;
                    arHandle->markerInfo[cid].cfMatrix = arHandle->history[i].marker.cfMatrix;
                    arHandle->markerInfo[cid].idMatrix = arHandle->history[i].marker.idMatrix;
                    diffmin = 10000.0 * 10000.0;
                    cdir = -1;
                    for( j = 0; j < 4; j++ ) {
                        diff = 0;
                        for( k = 0; k < 4; k++ ) {
                            diff += (arHandle->history[i].marker.vertex[k][0] - arHandle->markerInfo[cid].vertex[(j+k)%4][0])
                            * (arHandle->history[i].marker.vertex[k][0] - arHandle->markerInfo[cid].vertex[(j+k)%4][0])
                            + (arHandle->history[i].marker.vertex[k][1] - arHandle->markerInfo[cid].vertex[(j+k)%4][1])
                            * (arHandle->history[i].marker.vertex[k][1] - arHandle->markerInfo[cid].vertex[(j+k)%4][1]);
                        }
                        if( diff < diffmin ) {
                            diffmin = diff;
                            cdir = j;
                        }
                    }
                    arHandle->markerInfo[cid].dirPatt   = (arHandle->history[i].marker.dirPatt   - cdir + 4) % 4;
                    arHandle->markerInfo[cid].dirMatrix = (arHandle->history[i].marker.dirMatrix - cdir + 4) % 4;
                }
            }
            else return -1; // Unsupported arPatternDetectionMode.
        } // cid >= 0
    }

    confidenceCutoff(arHandle);

    // Age all history records (and expire old records, i.e. where count >= 4).
    for( i = j = 0; i < arHandle->history_num; i++ ) {
        arHandle->history[i].count++;
        if( arHandle->history[i].count < 4 ) {
            if (i != j) arHandle->history[j] = arHandle->history[i];
            j++;
        }
    }
    arHandle->history_num = j;

    // Save current marker info in history.
    for( i = 0; i < arHandle->marker_num; i++ ) {
        if( arHandle->markerInfo[i].id < 0 ) continue;

        // Check if an ARTrackingHistory record already exists for this marker ID.
        for( j = 0; j < arHandle->history_num; j++ ) {
            if( arHandle->history[j].marker.id == arHandle->markerInfo[i].id ) break;
        }
        if( j == arHandle->history_num ) { // If a pre-existing ARTrackingHistory record was not found,
            if( arHandle->history_num == AR_SQUARE_MAX ) break; // exit if we've filled all available history slots,
            arHandle->history_num++; // Otherwise count the newly created record.
        }
        arHandle->history[j].marker = arHandle->markerInfo[i]; // Save the marker info.
        arHandle->history[j].count  = 1; // Reset count to indicate info is fresh.
    }

    if( arHandle->arMarkerExtractionMode == AR_USE_TRACKING_HISTORY_V2 ) {
        return 0;
    }


    for( i = 0; i < arHandle->history_num; i++ ) {
        for( j = 0; j < arHandle->marker_num; j++ ) {
            rarea = (ARdouble)arHandle->history[i].marker.area / (ARdouble)arHandle->markerInfo[j].area;
            if( rarea < 0.7 || rarea > 1.43 ) continue;
            rlen = ( (arHandle->markerInfo[j].pos[0] - arHandle->history[i].marker.pos[0])
                   * (arHandle->markerInfo[j].pos[0] - arHandle->history[i].marker.pos[0])
                   + (arHandle->markerInfo[j].pos[1] - arHandle->history[i].marker.pos[1])
                   * (arHandle->markerInfo[j].pos[1] - arHandle->history[i].marker.pos[1]) )
                   / arHandle->markerInfo[j].area;
            if( rlen < 0.5 ) break;
        }
        if( j == arHandle->marker_num ) {
            arHandle->markerInfo[arHandle->marker_num] = arHandle->history[i].marker;
            arHandle->marker_num++;
        }
    }

    return 0;
}
arDetectMarker

 

  

2. nftSimple -- Linux版本 

static void mainLoop(void)
{
    static int ms_prev;
    int ms;
    float s_elapsed;
    ARUint8 *image;

    // NFT results.
    static int detectedPage = -2; // -2 Tracking not inited, -1 tracking inited OK, >= 0 tracking online on page.
    static float trackingTrans[3][4];
    

    int             i, j, k;
    
    // Find out how long since mainLoop() last ran.
    ms = glutGet(GLUT_ELAPSED_TIME);
    s_elapsed = (float)(ms - ms_prev) * 0.001f;
    if (s_elapsed < 0.01f) return; // Don't update more often than 100 Hz.
    ms_prev = ms;
    
    // Update drawing.
    DrawCubeUpdate(s_elapsed);
    
    // Grab a video frame.
    if ((image = arVideoGetImage()) != NULL) {
gARTImage
= image; // Save the fetched image. gCallCountMarkerDetect++; // Increment ARToolKit FPS counter. // Run marker detection on frame if (threadHandle) { // Perform NFT tracking. float err; int ret; int pageNo; if( detectedPage == -2 ) { trackingInitStart( threadHandle, gARTImage );  //--> (1) detectedPage = -1; } if( detectedPage == -1 ) { ret = trackingInitGetResult( threadHandle, trackingTrans, &pageNo);  //--> (2) identify object if( ret == 1 ) { if (pageNo >= 0 && pageNo < surfaceSetCount) { ARLOGd("Detected page %d.\n", pageNo); detectedPage = pageNo; ar2SetInitTrans(surfaceSet[detectedPage], trackingTrans);  //--> (3) } else { ARLOGe("Detected bad page %d.\n", pageNo); detectedPage = -2; } } else if( ret < 0 ) { ARLOGd("No page detected.\n"); detectedPage = -2; } } if( detectedPage >= 0 && detectedPage < surfaceSetCount) { if( ar2Tracking(ar2Handle, surfaceSet[detectedPage], gARTImage, trackingTrans, &err) < 0 ) {  //--> track object ARLOGd("Tracking lost.\n"); detectedPage = -2; } else { ARLOGd("Tracked page %d (max %d).\n", detectedPage, surfaceSetCount - 1); } } } else { ARLOGe("Error: threadHandle\n"); detectedPage = -2; } // Update markers. for (i = 0; i < markersNFTCount; i++) { markersNFT[i].validPrev = markersNFT[i].valid; if (markersNFT[i].pageNo >= 0 && markersNFT[i].pageNo == detectedPage) { markersNFT[i].valid = TRUE; for (j = 0; j < 3; j++) for (k = 0; k < 4; k++) markersNFT[i].trans[j][k] = trackingTrans[j][k]; } else markersNFT[i].valid = FALSE; if (markersNFT[i].valid) { // Filter the pose estimate. if (markersNFT[i].ftmi) { if (arFilterTransMat(markersNFT[i].ftmi, markersNFT[i].trans, !markersNFT[i].validPrev) < 0) { ARLOGe("arFilterTransMat error with marker %d.\n", i); } } if (!markersNFT[i].validPrev) { // Marker has become visible, tell any dependent objects. // ---> } // We have a new pose, so set that. 
arglCameraViewRH((const ARdouble (*)[4])markersNFT[i].trans, markersNFT[i].pose.T, VIEW_SCALEFACTOR); // Tell any dependent objects about the update. // ---> } else { if (markersNFT[i].validPrev) { // Marker has ceased to be visible, tell any dependent objects. // ---> } } } // Tell GLUT the display has changed. glutPostRedisplay(); } }

 

 

(1)

// Hand a copy of the current frame to the KPM worker thread and signal it to
// start a detection pass. Returns 0 on success, -1 on bad arguments/state.
int trackingInitStart( THREAD_HANDLE_T *threadHandle, ARUint8 *imagePtr )
{
    TrackingInitHandle *handle;

    if (!threadHandle || !imagePtr) {
        ARLOGe("trackingInitStart(): Error: NULL threadHandle or imagePtr.\n");
        return (-1);
    }

    handle = (TrackingInitHandle *)threadGetArg(threadHandle);
    if (!handle) {
        ARLOGe("trackingInitStart(): Error: NULL trackingInitHandle.\n");
        return (-1);
    }

    // Snapshot the frame into the worker's private buffer so the caller may
    // reuse/overwrite its own frame buffer immediately.
    memcpy(handle->imagePtr, imagePtr, handle->imageSize);

    // Wake the worker thread; the heavy KPM matching runs over there.
    threadStartSignal(threadHandle);

    return 0;
}

 

(2)

// Shared state between the main thread and the KPM detection worker thread.
// The main thread fills imagePtr (trackingInitStart) and reads trans/page/flag
// back (trackingInitGetResult); the worker writes them (trackingInitMain).
typedef struct {
    KpmHandle              *kpmHandle;      // KPM-related data. Not owned; supplied to trackingInitInit().
    ARUint8                *imagePtr;       // Pointer to image being tracked. Private heap copy, imageSize bytes.
    int                     imageSize;      // Bytes per image.
    float                   trans[3][4];    // Transform containing pose of tracked image. Valid only when flag is nonzero.
    int                     page;           // Assigned page number of tracked image. Valid only when flag is nonzero.
    int                     flag;           // Tracked successfully (nonzero = trans/page hold a result).
} TrackingInitHandle;
THREAD_HANDLE_T
// Poll the KPM worker for a detection result.
// Returns 0 if the worker is still busy, 1 with trans/page filled on success,
// -1 on bad arguments or when the pass completed without finding a page.
int trackingInitGetResult( THREAD_HANDLE_T *threadHandle, float trans[3][4], int *page )
{
    TrackingInitHandle *handle;
    int r, c;

    if (!threadHandle || !trans || !page) {
        ARLOGe("trackingInitGetResult(): Error: NULL threadHandle or trans or page.\n");
        return (-1);
    }

    // Worker still running: nothing to report yet.
    if (threadGetStatus(threadHandle) == 0) return 0;
    threadEndWait(threadHandle);

    handle = (TrackingInitHandle *)threadGetArg(threadHandle);
    if (!handle) return (-1);

    // The matching itself happened on the worker thread; here we only copy
    // the result out of the shared handle.
    if (handle->flag) {
        for (r = 0; r < 3; r++) {
            for (c = 0; c < 4; c++) {
                trans[r][c] = handle->trans[r][c];
            }
        }
        *page = handle->page;
        return 1;
    }
    return -1;
}

 

(3)

// Seed the AR2 texture tracker with an initial pose (e.g. from KPM detection).
// Returns 0 on success, -1 if surfaceSet is NULL.
int ar2SetInitTrans( AR2SurfaceSetT *surfaceSet, float  trans[3][4] )
{
    int row, col;

    if (!surfaceSet) return -1;

    // One prior pose is now available for continuity tracking.
    surfaceSet->contNum = 1;
    for (row = 0; row < 3; row++) {
        for (col = 0; col < 4; col++) {
            surfaceSet->trans1[row][col] = trans[row][col];
        }
    }
    // Invalidate any previously tracked feature.
    surfaceSet->prevFeature[0].flag = -1;
    return 0;
}

 

Next, let's look at the thread that does the actual tracking work.


 

// Program entry point: set up camera, NFT tracking, GLUT window and markers,
// then hand control to the GLUT event loop.
int main(int argc, char** argv)
{
    char glutGamemode[32];
    const char *cparam_name = "Data2/camera_para.dat";
    char vconf[] = "";
    const char markerConfigDataFilename[] = "Data2/markers.dat";
    
#ifdef DEBUG
    arLogLevel = AR_LOG_LEVEL_DEBUG;
#endif
    
    //
    // Library inits.
    //
    
    glutInit(&argc, argv);
    
    //
    // Video setup.
    //
    
#ifdef _WIN32
    CoInitialize(NULL);
#endif
    
    if (!setupCamera(cparam_name, vconf, &gCparamLT)) {
        ARLOGe("main(): Unable to set up AR camera.\n");
        exit(-1);
    }
    
    //
    // AR init.
    //
    
    // Create the OpenGL projection from the calibrated camera parameters.
    arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, cameraLens);
    
    if (!initNFT(gCparamLT, arVideoGetPixelFormat())) {  // --> (1)
        ARLOGe("main(): Unable to init NFT.\n");
        exit(-1);
    }

    //
    // Graphics setup.
    //
    
    // Set up GL context(s) for OpenGL to draw into.
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH);
    if (!prefWindowed) {
        // FIX: use snprintf; sprintf could overflow the 32-byte mode string for
        // large width/height/depth/refresh preference values.
        if (prefRefresh) snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i:%i@%i", prefWidth, prefHeight, prefDepth, prefRefresh);
        else snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i:%i", prefWidth, prefHeight, prefDepth);
        glutGameModeString(glutGamemode);
        glutEnterGameMode();
    } else {
        glutInitWindowSize(gCparamLT->param.xsize, gCparamLT->param.ysize);
        glutCreateWindow(argv[0]);
    }
    
    // Setup ARgsub_lite library for current OpenGL context.
    if ((gArglSettings = arglSetupForCurrentContext(&(gCparamLT->param), arVideoGetPixelFormat())) == NULL) {
        ARLOGe("main(): arglSetupForCurrentContext() returned error.\n");
        cleanup();
        exit(-1);
    }
    arUtilTimerReset();
    
    //
    // Markers setup.
    //
    
    // Load marker(s).
    newMarkers(markerConfigDataFilename, &markersNFT, &markersNFTCount);  // --> (2)
    if (!markersNFTCount) {
        ARLOGe("Error loading markers from config. file '%s'.\n", markerConfigDataFilename);
        cleanup();
        exit(-1);
    }
    ARLOGi("Marker count = %d\n", markersNFTCount);
    
    // Marker data has been loaded, so now load NFT data.
    if (!loadNFTData()) {                              // --> (3)
        ARLOGe("Error loading NFT data.\n");
        cleanup();
        exit(-1);
    }    
    
    // Start the video.
    // FIX: message previously said "setupCamera():" although this call is in
    // main(); also previously returned FALSE (0 = success) on this fatal
    // error -- now fails consistently with the other error paths.
    if (arVideoCapStart() != 0) {
        ARLOGe("main(): Unable to begin camera data capture.\n");
        cleanup();
        exit(-1);
    }
    
    // Register GLUT event-handling callbacks.
    // NB: mainLoop() is registered by Visibility.
    glutDisplayFunc(Display);
    glutReshapeFunc(Reshape);
    glutVisibilityFunc(Visibility);
    glutKeyboardFunc(Keyboard);
    
    glutMainLoop();

    return (0);
}

 

 

(1)

// Modifies globals: kpmHandle, ar2Handle.
static int initNFT(ARParamLT *cparamLT, AR_PIXEL_FORMAT pixFormat)
{
    ARLOGd("Initialising NFT.\n");

    // KPM init. (Key Point Matching) -- used for initial page detection.
    kpmHandle = kpmCreateHandle(cparamLT, pixFormat);
    if (!kpmHandle) {
        ARLOGe("Error: kpmCreateHandle.\n");
        return (FALSE);
    }

    // AR2 init -- used for frame-to-frame texture tracking.
    ar2Handle = ar2CreateHandle(cparamLT, pixFormat, AR2_TRACKING_DEFAULT_THREAD_NUM);
    if (!ar2Handle) {
        ARLOGe("Error: ar2CreateHandle.\n");
        kpmDeleteHandle(&kpmHandle);
        return (FALSE);
    }

    // Tune tracking parameters to the number of available CPU cores; the only
    // difference is a wider search window when more than one core is present.
    if (threadGetCPU() <= 1) {
        ARLOGi("Using NFT tracking settings for a single CPU.\n");
        ar2SetTrackingThresh(ar2Handle, 5.0);
        ar2SetSimThresh(ar2Handle, 0.50);
        ar2SetSearchFeatureNum(ar2Handle, 16);
        ar2SetSearchSize(ar2Handle, 6);
        ar2SetTemplateSize1(ar2Handle, 6);
        ar2SetTemplateSize2(ar2Handle, 6);
    } else {
        ARLOGi("Using NFT tracking settings for more than one CPU.\n");
        ar2SetTrackingThresh(ar2Handle, 5.0);
        ar2SetSimThresh(ar2Handle, 0.50);
        ar2SetSearchFeatureNum(ar2Handle, 16);
        ar2SetSearchSize(ar2Handle, 12);
        ar2SetTemplateSize1(ar2Handle, 6);
        ar2SetTemplateSize2(ar2Handle, 6);
    }

    // NFT dataset loading will happen later.
    return (TRUE);
}

 

(2)

// Per-marker state for one NFT (natural feature tracking) target.
typedef struct _ARMarkerNFT {
    // ARMarker protected 
    bool       valid;                 // Marker is tracked in the current frame.
    bool       validPrev;             // Marker was tracked in the previous frame.
    ARdouble   trans[3][4];           // Camera-relative pose of the marker.
    ARPose     pose;                  // Pose for rendering; pose.T is filled via arglCameraViewRH().
    ARdouble   marker_width;
    ARdouble   marker_height;
    // ARMarker private
    ARFilterTransMatInfo *ftmi;       // Pose filter; NULL unless a FILTER option was configured.
    ARdouble   filterCutoffFrequency; // Filter cutoff; defaults may be overridden by config.
    ARdouble   filterSampleRate;
    // ARMarkerNFT
    int        pageNo;                // Page number assigned when NFT data is loaded; -1 = not loaded.
    char      *datasetPathname;       // Heap-allocated base pathname of the .fset/.fset3 dataset.
} ARMarkerNFT;
ARMarkerNFT
// Read the marker configuration file and build an array of ARMarkerNFT.
// On success *markersNFT_out points at a heap-allocated array of
// *markersNFTCount_out entries; on any failure both outputs are reset
// (NULL / 0) and all partially-read entries are released.
void newMarkers(const char *markersConfigDataFilePathC, ARMarkerNFT **markersNFT_out, int *markersNFTCount_out)
{
    FILE          *fp;
    char           lineBuf[MAXPATHLEN], typeBuf[MAXPATHLEN];
    int            numMarkers;
    ARMarkerNFT   *markers;
    int            markerCount;
    ARdouble       optionValue;
    int            i;
    char           configDir[MAXPATHLEN];
    size_t         configDirLen;

    if (!markersConfigDataFilePathC || markersConfigDataFilePathC[0] == '\0' || !markersNFT_out || !markersNFTCount_out) return;

    // Dataset paths in the file are relative to the config file's directory.
    ARLOGd("Opening marker config. data file from path '%s'.\n", markersConfigDataFilePathC);
    arUtilGetDirectoryNameFromPath(configDir, markersConfigDataFilePathC, MAXPATHLEN, 1); // 1 = add '/' at end.
    configDirLen = strlen(configDir);

    fp = fopen(markersConfigDataFilePathC, "r");
    if (!fp) {
        ARLOGe("Error: unable to locate marker config data file '%s'.\n", markersConfigDataFilePathC);
        return;
    }

    // First line is number of markers to read.
    get_buff(lineBuf, MAXPATHLEN, fp, 1);
    if (sscanf(lineBuf, "%d", &numMarkers) != 1) {
        ARLOGe("Error in marker configuration data file; expected marker count.\n");
        fclose(fp);
        return;
    }

    arMallocClear(markers, ARMarkerNFT, numMarkers);
    markerCount = numMarkers;

    ARLOGd("Reading %d marker configuration(s).\n", markerCount);

    for (i = 0; i < markerCount; i++) {

        // Marker name; for NFT markers this is the dataset's base pathname.
        if (!get_buff(lineBuf, MAXPATHLEN, fp, 1)) {
            ARLOGe("Error in marker configuration data file; expected marker name.\n");
            break;
        }

        // Marker type.
        if (!get_buff(typeBuf, MAXPATHLEN, fp, 1)) {
            ARLOGe("Error in marker configuration data file; expected marker type.\n");
            break;
        }

        // Interpret marker type; only NFT markers are supported in this build.
        if (strcmp(typeBuf, "SINGLE") == 0) {
            ARLOGe("Error in marker configuration data file; SINGLE markers not supported in this build.\n");
        } else if (strcmp(typeBuf, "MULTI") == 0) {
            ARLOGe("Error in marker configuration data file; MULTI markers not supported in this build.\n");
        } else if (strcmp(typeBuf, "NFT") == 0) {
            markers[i].valid = markers[i].validPrev = FALSE;
            // Dataset pathname = config-file directory + marker name.
            arMalloc(markers[i].datasetPathname, char, configDirLen + strlen(lineBuf) + 1);
            strcpy(markers[i].datasetPathname, configDir);
            strcpy(markers[i].datasetPathname + configDirLen, lineBuf);
            markers[i].pageNo = -1;
        } else {
            ARLOGe("Error in marker configuration data file; unsupported marker type %s.\n", typeBuf);
        }

        // Look for optional tokens. A blank line marks end of options.
        while (get_buff(lineBuf, MAXPATHLEN, fp, 0) && (lineBuf[0] != '\0')) {
            if (strncmp(lineBuf, "FILTER", 6) == 0) {
                markers[i].filterCutoffFrequency = AR_FILTER_TRANS_MAT_CUTOFF_FREQ_DEFAULT;
                markers[i].filterSampleRate = AR_FILTER_TRANS_MAT_SAMPLE_RATE_DEFAULT;
                if (strlen(lineBuf) != 6) {
                    // Optional numeric argument overrides the cutoff frequency.
                    if (sscanf(&lineBuf[6],
#ifdef ARDOUBLE_IS_FLOAT
                               "%f"
#else
                               "%lf"
#endif
                               , &optionValue) == 1) markers[i].filterCutoffFrequency = optionValue;
                }
                markers[i].ftmi = arFilterTransMatInit(markers[i].filterSampleRate, markers[i].filterCutoffFrequency);
            }
            // Unknown tokens are ignored.
        }
    }
    fclose(fp);

    // If not all markers were read, an error occurred: clean up everything
    // allocated so far (arMallocClear zeroed the array, so unfilled entries
    // have NULL pointers and are safe to inspect).
    if (i < markerCount) {
        for (; i >= 0; i--) {
            if (markers[i].datasetPathname) free(markers[i].datasetPathname);
            if (markers[i].ftmi) arFilterTransMatFinal(markers[i].ftmi);
        }
        free(markers);
        *markersNFTCount_out = 0;
        *markersNFT_out = NULL;
        return;
    }

    *markersNFTCount_out = markerCount;
    *markersNFT_out = markers;
}

 

(3)

// References globals: markersNFTCount
// Modifies globals: threadHandle, surfaceSet[], surfaceSetCount, markersNFT[]
static int loadNFTData(void)
{
    int i;
    KpmRefDataSet *refDataSet;  // Merged KPM feature sets for all markers.
    
    // If data was already loaded, stop KPM tracking thread and unload previously loaded data.
    if (threadHandle) {
        ARLOGi("Reloading NFT data.\n");
        unloadNFTData();
    } else {
        ARLOGi("Loading NFT data.\n");
    }
    
    refDataSet = NULL;
    
    for (i = 0; i < markersNFTCount; i++) {
        // Load KPM data (.fset3) used for initial page detection.
        KpmRefDataSet  *refDataSet2;
        ARLOGi("Reading %s.fset3\n", markersNFT[i].datasetPathname);
        if (kpmLoadRefDataSet(markersNFT[i].datasetPathname, "fset3", &refDataSet2) < 0 ) {
            ARLOGe("Error reading KPM data from %s.fset3\n", markersNFT[i].datasetPathname);
            markersNFT[i].pageNo = -1; // Mark this marker unusable and move on.
            continue;
        }
        // Page number ties this marker to its slot in surfaceSet[].
        markersNFT[i].pageNo = surfaceSetCount;
        ARLOGi("  Assigned page no. %d.\n", surfaceSetCount);
        // Stamp all loaded KPM references with this page number, then merge
        // them into the combined set handed to KPM below.
        if (kpmChangePageNoOfRefDataSet(refDataSet2, KpmChangePageNoAllPages, surfaceSetCount) < 0) {
            ARLOGe("Error: kpmChangePageNoOfRefDataSet\n");
            exit(-1);
        }
        if (kpmMergeRefDataSet(&refDataSet, &refDataSet2) < 0) {
            ARLOGe("Error: kpmMergeRefDataSet\n");
            exit(-1);
        }
        ARLOGi("  Done.\n");
        
        // Load AR2 data (.fset) used for frame-to-frame texture tracking.
        ARLOGi("Reading %s.fset\n", markersNFT[i].datasetPathname);
        
        // NOTE(review): a failure here is only logged; surfaceSet[surfaceSetCount]
        // remains NULL yet surfaceSetCount is still incremented below, so a later
        // ar2Tracking() on this page would dereference NULL. Left unchanged because
        // fixing it naively would desynchronise page numbers already stamped into
        // the KPM set above -- confirm intended handling.
        if ((surfaceSet[surfaceSetCount] = ar2ReadSurfaceSet(markersNFT[i].datasetPathname, "fset", NULL)) == NULL ) {
            ARLOGe("Error reading data from %s.fset\n", markersNFT[i].datasetPathname);
        }
        ARLOGi("  Done.\n");
        
        surfaceSetCount++;
        if (surfaceSetCount == PAGES_MAX) break; // No room for further pages.
    }
    // Hand the merged reference set to KPM. The local set is deleted right
    // after, so presumably kpmSetRefDataSet copies or takes ownership -- verify.
    if (kpmSetRefDataSet(kpmHandle, refDataSet) < 0) {
        ARLOGe("Error: kpmSetRefDataSet\n");
        exit(-1);
    }
    kpmDeleteRefDataSet(&refDataSet);
    
    // Start the KPM tracking thread.
    threadHandle = trackingInitInit(kpmHandle);  // --> (4)
    if (!threadHandle) exit(-1);

    ARLOGi("Loading of NFT data complete.\n");
    return (TRUE);
}

 

(4)

// Allocate the shared TrackingInitHandle and spawn the KPM detection worker
// thread. Returns the thread handle, or NULL on any allocation/thread failure
// (in which case nothing is leaked).
THREAD_HANDLE_T *trackingInitInit( KpmHandle *kpmHandle )
{
    TrackingInitHandle  *trackingInitHandle;
    THREAD_HANDLE_T     *threadHandle;

    if (!kpmHandle) {
        ARLOGe("trackingInitInit(): Error: NULL KpmHandle.\n");
        return (NULL);
    }
    
    trackingInitHandle = (TrackingInitHandle *)malloc(sizeof(TrackingInitHandle));
    if( trackingInitHandle == NULL ) return NULL;
    trackingInitHandle->kpmHandle = kpmHandle;
    // Size of one full frame in bytes, derived from the KPM camera setup.
    trackingInitHandle->imageSize = kpmHandleGetXSize(kpmHandle) * kpmHandleGetYSize(kpmHandle) * arUtilGetPixelSize(kpmHandleGetPixelFormat(kpmHandle));
    trackingInitHandle->imagePtr  = (ARUint8 *)malloc(trackingInitHandle->imageSize);
    // FIX: the image-buffer malloc was unchecked; a failure would crash later
    // in trackingInitStart()'s memcpy.
    if (!trackingInitHandle->imagePtr) {
        free(trackingInitHandle);
        return NULL;
    }
    trackingInitHandle->flag      = 0;

    threadHandle = threadInit(0, trackingInitHandle, trackingInitMain);
    // FIX: on threadInit() failure the handle and image buffer used to leak.
    if (!threadHandle) {
        free(trackingInitHandle->imagePtr);
        free(trackingInitHandle);
        return NULL;
    }
    return threadHandle;
}
// State shared with the KPM detection worker thread (passed as the thread arg).
typedef struct {
    KpmHandle              *kpmHandle;      // KPM-related data. Borrowed reference, not freed here.
    ARUint8                *imagePtr;       // Pointer to image being tracked. Own heap buffer of imageSize bytes.
    int                     imageSize;      // Bytes per image.
    float                   trans[3][4];    // Transform containing pose of tracked image. Written by the worker.
    int                     page;           // Assigned page number of tracked image. Written by the worker.
    int                     flag;           // Tracked successfully. Nonzero when trans/page are valid.
} TrackingInitHandle;
// Create a detached worker thread running start_routine(handle).
// 'arg' is stored in the handle and retrievable via threadGetArg().
// Returns the handle, or NULL on allocation/thread-creation failure.
THREAD_HANDLE_T *threadInit( int ID, void *arg, void *(*start_routine)(THREAD_HANDLE_T*) )
{
    THREAD_HANDLE_T    *flag;
    int err;
#if !defined(_WINRT) && !defined(ARUTIL_DISABLE_PTHREADS)
    pthread_t           thread;
    pthread_attr_t      attr;
#endif
    if ((flag = malloc(sizeof(THREAD_HANDLE_T))) == NULL) return NULL;

    flag->ID     = ID;
    flag->startF = 0;
    flag->endF   = 0;
    flag->busyF  = 0;
    flag->arg    = arg;
    pthread_mutex_init( &(flag->mut), NULL );
    pthread_cond_init( &(flag->cond1), NULL );
    pthread_cond_init( &(flag->cond2), NULL );

#if !defined(_WINRT) && !defined(ARUTIL_DISABLE_PTHREADS)
    pthread_attr_init(&attr);
    // FIX: use the POSIX constant rather than the literal 1; the value of
    // PTHREAD_CREATE_DETACHED is not guaranteed by the standard.
    // Detached: precludes the need to pthread_join the thread after it exits.
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
    err = pthread_create(&thread, &attr, (void *(*)(void*))start_routine, flag);  // --> (5)
    pthread_attr_destroy(&attr);
#elif defined(_WIN32)
#  ifdef _WINRT
    err = arCreateDetachedThreadWinRT(start_routine, flag);
#  else
    struct start_routine_proxy_arg *srpa_p = malloc(sizeof(struct start_routine_proxy_arg));
    // FIX: this malloc was unchecked; a failure would dereference NULL below.
    if (!srpa_p) {
        threadFree(&flag);
        return NULL;
    }
    srpa_p->start_routine = start_routine;
    srpa_p->arg = flag;
    err = (_beginthread(start_routine_proxy, 0, srpa_p) == -1L);
#  endif
#else
#  error No routine available to create a thread.
#endif
    if (err == 0) {
        return flag;
    } else {
        threadFree(&flag);
        return NULL;
    }
}

 

(5)

// Worker thread body: waits for a start signal, runs KPM matching on the
// frame copied into the shared handle, records the best (lowest-error) pose
// found, and signals completion. Loops until threadStartWait() reports the
// thread should quit.
static void *trackingInitMain( THREAD_HANDLE_T *threadHandle )
{
    TrackingInitHandle     *trackingInitHandle;
    KpmHandle              *kpmHandle;
    KpmResult              *kpmResult = NULL;
    int                     kpmResultNum;
    ARUint8                *imagePtr;
    float                   err = 0.0f; // Initialized defensively; only read after flag is set.
    int                     i, j, k;

    if (!threadHandle) {
        ARLOGe("Error starting tracking thread: empty THREAD_HANDLE_T.\n");
        return (NULL);
    }
    trackingInitHandle = (TrackingInitHandle *)threadGetArg(threadHandle);
    // FIX: this guard previously re-tested threadHandle (copy-paste error),
    // so a NULL trackingInitHandle would have been dereferenced below.
    if (!trackingInitHandle) {
        ARLOGe("Error starting tracking thread: empty trackingInitHandle.\n");
        return (NULL);
    }
    kpmHandle          = trackingInitHandle->kpmHandle;
    imagePtr           = trackingInitHandle->imagePtr;
    if (!kpmHandle || !imagePtr) {
        ARLOGe("Error starting tracking thread: empty kpmHandle/imagePtr.\n");
        return (NULL);
    }
    ARLOGi("Start tracking thread.\n");

    // Grab pointers to KPM's internal result array once, for convenience.
    kpmGetResult( kpmHandle, &kpmResult, &kpmResultNum );

    for (;;) {
        // Block until the main thread signals a new frame (or shutdown).
        if( threadStartWait(threadHandle) < 0 ) break;

        kpmMatching(kpmHandle, imagePtr);
        trackingInitHandle->flag = 0;
        for( i = 0; i < kpmResultNum; i++ ) {
            if( kpmResult[i].camPoseF != 0 ) continue; // Skip pages with no valid pose.
            ARLOGd("kpmGetPose OK.\n");
            // Take the first result, or any later one with a lower error.
            if( trackingInitHandle->flag == 0 || err > kpmResult[i].error ) {
                trackingInitHandle->flag = 1;
                trackingInitHandle->page = kpmResult[i].pageNo;
                for (j = 0; j < 3; j++) {
                    for (k = 0; k < 4; k++) {
                        trackingInitHandle->trans[j][k] = kpmResult[i].camPose[j][k];
                    }
                }
                err = kpmResult[i].error;
            }
        }
        // Tell the main thread the result is ready.
        threadEndSignal(threadHandle);
    }

    ARLOGi("End tracking thread.\n");
    return (NULL);
}

 

见:[Artoolkit] kpmMatching Analysis of nftSimple 

posted @ 2017-02-17 16:40  郝壹贰叁  阅读(850)  评论(0编辑  收藏  举报