本文整理汇总了C++中ARLOGe函数的典型用法代码示例。如果您正苦于以下问题:C++ ARLOGe函数的具体用法?C++ ARLOGe怎么用?C++ ARLOGe使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了ARLOGe函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: loadNFTData
// References globals: markersNFTCount
// Modifies globals: threadHandle, surfaceSet[], surfaceSetCount, markersNFT[]
// Loads the NFT datasets for every marker in markersNFT[]: the KPM key-point
// data ("<pathname>.fset3") is merged into a single reference set and installed
// into kpmHandle; the AR2 texture-tracking data ("<pathname>.fset") is stored
// in surfaceSet[]. Each successfully-loaded marker is assigned a page number;
// markers whose .fset3 fails to load get pageNo = -1 and are skipped.
// Finally the KPM tracking thread is (re)started.
// Returns TRUE on success; calls exit(-1) on unrecoverable errors.
static int loadNFTData(void)
{
    int i;
    KpmRefDataSet *refDataSet;

    // If data was already loaded, stop KPM tracking thread and unload previously loaded data.
    if (threadHandle) {
        ARLOGi("Reloading NFT data.\n");
        unloadNFTData();
    } else {
        ARLOGi("Loading NFT data.\n");
    }

    refDataSet = NULL;
    for (i = 0; i < markersNFTCount; i++) {
        // Load KPM data.
        KpmRefDataSet *refDataSet2;
        ARLOGi("Reading %s.fset3\n", markersNFT[i].datasetPathname);
        if (kpmLoadRefDataSet(markersNFT[i].datasetPathname, "fset3", &refDataSet2) < 0 ) {
            ARLOGe("Error reading KPM data from %s.fset3\n", markersNFT[i].datasetPathname);
            markersNFT[i].pageNo = -1; // Mark this marker as unusable.
            continue;
        }
        markersNFT[i].pageNo = surfaceSetCount;
        ARLOGi(" Assigned page no. %d.\n", surfaceSetCount);
        // Renumber the just-loaded pages to this marker's page number, then
        // merge them into the cumulative reference set (refDataSet2 is
        // consumed by the merge).
        if (kpmChangePageNoOfRefDataSet(refDataSet2, KpmChangePageNoAllPages, surfaceSetCount) < 0) {
            ARLOGe("Error: kpmChangePageNoOfRefDataSet\n");
            exit(-1);
        }
        if (kpmMergeRefDataSet(&refDataSet, &refDataSet2) < 0) {
            ARLOGe("Error: kpmMergeRefDataSet\n");
            exit(-1);
        }
        ARLOGi(" Done.\n");

        // Load AR2 data.
        ARLOGi("Reading %s.fset\n", markersNFT[i].datasetPathname);
        if ((surfaceSet[surfaceSetCount] = ar2ReadSurfaceSet(markersNFT[i].datasetPathname, "fset", NULL)) == NULL ) {
            ARLOGe("Error reading data from %s.fset\n", markersNFT[i].datasetPathname);
        }
        ARLOGi(" Done.\n");

        surfaceSetCount++;
        if (surfaceSetCount == PAGES_MAX) break; // Page table full; remaining markers are skipped.
    }
    // Install the merged key-point set, then delete the local copy.
    // NOTE(review): the delete-after-set pattern suggests kpmSetRefDataSet()
    // copies the data — confirm against libKPM documentation.
    if (kpmSetRefDataSet(kpmHandle, refDataSet) < 0) {
        ARLOGe("Error: kpmSetRefDataSet\n");
        exit(-1);
    }
    kpmDeleteRefDataSet(&refDataSet);

    // Start the KPM tracking thread.
    threadHandle = trackingInitInit(kpmHandle);
    if (!threadHandle) exit(-1);

    ARLOGi("Loading of NFT data complete.\n");
    return (TRUE);
}
示例2: onFrameBuffer
// Frame-delivery callback invoked by the camera layer with a raw frame.
// Copies the incoming frame into one of the receiver's two frame buffers
// (if capture has been started and a buffer is not locked by the client),
// marks that buffer READY, and wakes the frame-ready notifier thread when
// a frame-ready callback is registered.
// Returns true if the frame was accepted, false on any error.
virtual bool onFrameBuffer(void *buffer, int bufferSize)
{
    int frameIndex;
    bool ret;

    // Reject calls made while disconnected, or with a missing/empty frame.
    if (!isConnected() || !buffer || bufferSize <= 0)
    {
        ARLOGe("Error: onFrameBuffer() called while not connected, or called without frame.\n");
        return false;
    }
    ret = true;
    m_framesReceived++;

    // frameLock guards the buffers and their status flags against the
    // client thread and the frame-ready notifier thread.
    pthread_mutex_lock(&m_nc->frameLock);
    if (m_nc->frameBuffers[0] && m_nc->frameBuffers[1]) // Only do copy if capture has been started.
    {
        if (bufferSize != m_nc->frameBufferLength)
        {
            ARLOGe("Error: onFrameBuffer frame size is %d but receiver expected %d.\n", bufferSize, m_nc->frameBufferLength);
            ret = false;
        }
        else
        {
            // Find a buffer to write to. Any buffer not locked by client is a candidate.
            if (m_nc->frameBuffersStatus[0] != LOCKED)
                frameIndex = 0;
            else if (m_nc->frameBuffersStatus[1] != LOCKED)
                frameIndex = 1;
            else
                frameIndex = -1;
            if (frameIndex == -1)
            {
                // Both buffers locked by the client; this frame is dropped.
                ARLOGe("Error: onFrameBuffer receiver was all full up.\n");
                ret = false;
            }
            else
            {
                ARLOGd("FRAME => buffer %d %p\n", frameIndex, m_nc->frameBuffers[frameIndex]);
                memcpy(m_nc->frameBuffers[frameIndex], buffer, bufferSize);
                m_nc->frameBuffersStatus[frameIndex] = READY;
                // Wake the notifier thread only when a callback is registered,
                // i.e. only when that thread exists to be woken.
                if (m_nc->frameReadyCallback)
                    pthread_cond_signal(&m_nc->frameReadyNotifierThreadCondGo);
            }
        }
    }
    else
    {
        // Capture not started yet; frame silently dropped (debug log only).
        ARLOGd("FRAME =X\n");
    }
    pthread_mutex_unlock(&m_nc->frameLock);
    return ret;
}
示例3: errorWMC
static void errorWMC(void *userdata)
{
if (!userdata) {
ARLOGe("Windows.Media.Capture error but no userdata suppplied.\n");
return;
}
AR2VideoParamWinMCT *vid = (AR2VideoParamWinMCT *)userdata;
ARLOGe("Windows.Media.Capture error.\n");
stopWMC(vid);
}
示例4: debugReportMode
// Report state of ARToolKit tracker: log which display method the given
// viewport is using (toggled with the 'd' key in the samples).
static void debugReportMode(ARGViewportHandle *vp)
{
    if (vp->dispMethod == AR_GL_DISP_METHOD_GL_DRAW_PIXELS) {
        ARLOGe("dispMode (d) : GL_DRAW_PIXELS\n");
        return;
    }
    if (vp->dispMethod == AR_GL_DISP_METHOD_TEXTURE_MAPPING_FRAME) {
        ARLOGe("dispMode (d) : TEXTURE MAPPING (FULL RESOLUTION)\n");
        return;
    }
    // Any other method is reported as half-resolution texture mapping.
    ARLOGe("dispMode (d) : TEXTURE MAPPING (HALF RESOLUTION)\n");
}
示例5: ARLOGe
// Entry point for the KPM (key point matching) tracking-initialisation thread.
// Repeatedly waits to be signalled (threadStartWait), runs kpmMatching() on
// the shared camera image, and records the best (lowest-error) successful
// camera pose into the TrackingInitHandle (flag/page/trans). Loops until the
// thread is asked to quit, then returns NULL.
static void *trackingInitMain( THREAD_HANDLE_T *threadHandle )
{
    TrackingInitHandle *trackingInitHandle;
    KpmHandle          *kpmHandle;
    KpmResult          *kpmResult = NULL;
    int                 kpmResultNum;
    ARUint8            *imagePtr;
    float               err = 0.0f; // Lowest error this pass; meaningful only once flag != 0.
    int                 i, j, k;

    if (!threadHandle) {
        ARLOGe("Error starting tracking thread: empty THREAD_HANDLE_T.\n");
        return (NULL);
    }
    trackingInitHandle = (TrackingInitHandle *)threadGetArg(threadHandle);
    // BUGFIX: previously re-tested 'threadHandle' (copy-paste error), so a
    // NULL trackingInitHandle was never caught and would crash just below.
    if (!trackingInitHandle) {
        ARLOGe("Error starting tracking thread: empty trackingInitHandle.\n");
        return (NULL);
    }
    kpmHandle = trackingInitHandle->kpmHandle;
    imagePtr  = trackingInitHandle->imagePtr;
    if (!kpmHandle || !imagePtr) {
        ARLOGe("Error starting tracking thread: empty kpmHandle/imagePtr.\n");
        return (NULL);
    }
    ARLOGi("Start tracking thread.\n");

    kpmGetResult( kpmHandle, &kpmResult, &kpmResultNum );

    for (;;) {
        if( threadStartWait(threadHandle) < 0 ) break; // Quit requested.

        kpmMatching(kpmHandle, imagePtr);
        trackingInitHandle->flag = 0;
        for( i = 0; i < kpmResultNum; i++ ) {
            if( kpmResult[i].camPoseF != 0 ) continue; // Skip failed pose estimates.
            ARLOGd("kpmGetPose OK.\n");
            if( trackingInitHandle->flag == 0 || err > kpmResult[i].error ) { // Take the first or best result.
                trackingInitHandle->flag = 1;
                trackingInitHandle->page = kpmResult[i].pageNo;
                for (j = 0; j < 3; j++) for (k = 0; k < 4; k++) trackingInitHandle->trans[j][k] = kpmResult[i].camPose[j][k];
                err = kpmResult[i].error;
            }
        }
        threadEndSignal(threadHandle);
    }

    ARLOGi("End tracking thread.\n");
    return (NULL);
}
示例6: ar2WriteImageSet
// Write an AR2 image set to the file "<filename>.iset".
// Layout: the image count, a JPEG-compressed copy of the full-resolution
// (scale[0]) greyscale image, then the dpi of each remaining scale level.
// Returns 0 on success, -1 on any open/write error (the file is closed
// on every path).
int ar2WriteImageSet(char *filename, AR2ImageSetT *imageSet)
{
    FILE          *fp;
    AR2JpegImageT  jpegImage;
    int            i;
    size_t         len;
    const char     ext[] = ".iset";
    char          *buf;

    // Build the full pathname "<filename>.iset".
    len = strlen(filename) + strlen(ext) + 1; // +1 for nul terminator.
    arMalloc(buf, char, len);
    snprintf(buf, len, "%s%s", filename, ext); // Bounded write (was unbounded sprintf).

    if ((fp = fopen(buf, "wb")) == NULL)
    {
        ARLOGe("Error: unable to open file '%s' for writing.\n", buf);
        free(buf);
        return (-1);
    }
    free(buf);

    if (fwrite(&(imageSet->num), sizeof(imageSet->num), 1, fp) != 1)
        goto bailBadWrite;

    // Describe the full-resolution greyscale image for JPEG encoding.
    jpegImage.xsize = imageSet->scale[0]->xsize;
    jpegImage.ysize = imageSet->scale[0]->ysize;
    jpegImage.dpi   = imageSet->scale[0]->dpi;
    jpegImage.nc    = 1; // Single (greyscale) channel.
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
    jpegImage.image = imageSet->scale[0]->imgBWBlur[0];
#else
    jpegImage.image = imageSet->scale[0]->imgBW;
#endif
    if (ar2WriteJpegImage2(fp, &jpegImage, AR2_DEFAULT_JPEG_IMAGE_QUALITY) < 0)
        goto bailBadWrite;

    // Only the dpi is stored for the reduced-scale levels; their pixel data
    // is not written here.
    for (i = 1; i < imageSet->num; i++)
    {
        if (fwrite(&(imageSet->scale[i]->dpi), sizeof(imageSet->scale[i]->dpi), 1, fp) != 1)
            goto bailBadWrite;
    }

    fclose(fp);
    return 0;

bailBadWrite:
    ARLOGe("Error saving image set: error writing data.\n");
    fclose(fp);
    return (-1);
}
示例7: setupCamera
// Open the video path described by 'vconf', query its frame size and pixel
// format, load the camera parameters from 'cparam_name' (resizing them to the
// actual frame size if needed), and build a lookup-table camera parameter
// structure in *cparamLT_p.
// Returns TRUE on success; on failure the video path is closed and FALSE
// is returned.
static int setupCamera(const char *cparam_name, char *vconf, ARParamLT **cparamLT_p)
{
    ARParam         cparam;
    int             xsize, ysize;
    AR_PIXEL_FORMAT pixFormat;

    // Open the video path.
    if (arVideoOpen(vconf) < 0) {
        ARLOGe("setupCamera(): Unable to open connection to camera.\n");
        return (FALSE);
    }

    // Find the size of the window.
    if (arVideoGetSize(&xsize, &ysize) < 0) {
        ARLOGe("setupCamera(): Unable to determine camera frame size.\n");
        goto bail;
    }
    ARLOGi("Camera image size (x,y) = (%d,%d)\n", xsize, ysize);

    // Get the format in which the camera is returning pixels.
    pixFormat = arVideoGetPixelFormat();
    if (pixFormat == AR_PIXEL_FORMAT_INVALID) {
        ARLOGe("setupCamera(): Camera is using unsupported pixel format.\n");
        goto bail;
    }

    // Load the camera parameters, resize for the window and init.
    if (arParamLoad(cparam_name, 1, &cparam) < 0) {
        ARLOGe("setupCamera(): Error loading parameter file %s for camera.\n", cparam_name);
        goto bail;
    }
    if (cparam.xsize != xsize || cparam.ysize != ysize) {
        ARLOGw("*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
        arParamChangeSize(&cparam, xsize, ysize, &cparam);
    }
#ifdef DEBUG
    ARLOG("*** Camera Parameter ***\n");
    arParamDisp(&cparam);
#endif
    if ((*cparamLT_p = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET)) == NULL) {
        ARLOGe("setupCamera(): Error: arParamLTCreate.\n");
        goto bail;
    }
    return (TRUE);

bail:
    // Shared error path: close the video connection opened above.
    arVideoClose();
    return (FALSE);
}
示例8: videoAndroidNativeCaptureStart
// Allocate the double frame buffers (NV21/NV12-sized) and, when a callback is
// supplied, start the frame-ready notifier thread. May be called once per
// capture session; a second call while started is refused.
// Returns true on success, false on failure (with all partial state undone).
bool videoAndroidNativeCaptureStart(VIDEO_ANDROID_NATIVE_CAPTURE *nc, AR_VIDEO_FRAME_READY_CALLBACK callback, void *userdata)
{
    int err;
    bool ret = true;

    ARLOGd("videoAndroidNativeCaptureStart().\n");
    if (!nc)
        return false;

    // Don't start if already started.
    if (nc->frameBuffers[0] || nc->frameBuffers[1])
    {
        ARLOGe("videoAndroidNativeCaptureStart called again.\n");
        return false;
    }

    // Create the frame buffers.
    pthread_mutex_lock(&nc->frameLock);
    nc->frameBufferLength = (nc->frameWidth * nc->frameHeight * 3) / 2; // Assume NV21/NV12 format.
    nc->frameBuffersStatus[0] = nc->frameBuffersStatus[1] = EMPTY;
    nc->frameBuffers[0] = (unsigned char*)malloc(nc->frameBufferLength);
    nc->frameBuffers[1] = (unsigned char*)malloc(nc->frameBufferLength);
    if (!nc->frameBuffers[0] || !nc->frameBuffers[1])
    {
        ARLOGe("Out of memory!\n");
        ret = false;
    }
    else
    {
        nc->frameReadyCallback = callback;
        if (callback)
        {
            // Start the frameReadyNotifierThread.
            nc->frameReadyCallbackUserdata = userdata;
            nc->frameReadyNotifierThreadShouldQuit = false;
            // BUGFIX: the error message previously named the wrong function
            // ("videoAndroidNativeCaptureOpen") and operation ("detaching").
            if ((err = pthread_create(&(nc->frameReadyNotifierThread), NULL, frameReadyNotifier, (void*)nc)) != 0)
            {
                ARLOGe("videoAndroidNativeCaptureStart(): Error %d creating thread.\n", err);
                ret = false;
            }
        }
    }
    if (!ret)
    {
        // BUGFIX: release partially-created buffers so a later start attempt
        // isn't permanently refused by the "already started" guard above.
        free(nc->frameBuffers[0]);
        free(nc->frameBuffers[1]);
        nc->frameBuffers[0] = nc->frameBuffers[1] = NULL;
    }
    pthread_mutex_unlock(&nc->frameLock);

    ARLOGd("/videoAndroidNativeCaptureStart nc->frameBufferLength=%d.\n", nc->frameBufferLength);
    return ret;
}
示例9: glmReadOBJ
// Initialise the AR application: open the camera, load the marker pattern,
// and read + normalise the OBJ model (scaled to pattWidth * modelScale).
// Returns false if already initialised or if any step fails, true on success.
bool ARTApp::init(const char *cparamName, const char *pattName, const char *objModelFile, float pattWidth, float modelScale)
{
    // Refuse double initialisation.
    if (arHandle)
        return false;

    if (!setupCamera(cparamName, "", &cParam, &arHandle, &ar3DHandle))
        return false;

    if (!setupMarker(pattName, &pattID, arHandle, &pattHandle))
        return false;

    // Load the OBJ model, normalise it to unit size, then scale it to the
    // marker's physical width times the requested model scale.
    objModel = glmReadOBJ(const_cast<char *>(objModelFile));
    if (!objModel)
    {
        ARLOGe("Unable to load obj model file.\n");
        return false;
    }
    glmUnitize(objModel);
    glmScale(objModel, pattWidth * modelScale);

    this->pattWidth = pattWidth;
    return true;
}
示例10: ARLOGe
// N.B. This function is duplicated in libARvideo, so that libARvideo doesn't need to
// link to libAR. Therefore, if changes are made here they should be duplicated there.
// Return the symbolic name of an AR_PIXEL_FORMAT value, or NULL (with an
// error logged) if the value is out of range.
const char *arUtilGetPixelFormatName(const AR_PIXEL_FORMAT arPixelFormat)
{
    static const char *const names[] = {
        "AR_PIXEL_FORMAT_RGB",
        "AR_PIXEL_FORMAT_BGR",
        "AR_PIXEL_FORMAT_RGBA",
        "AR_PIXEL_FORMAT_BGRA",
        "AR_PIXEL_FORMAT_ABGR",
        "AR_PIXEL_FORMAT_MONO",
        "AR_PIXEL_FORMAT_ARGB",
        "AR_PIXEL_FORMAT_2vuy",
        "AR_PIXEL_FORMAT_yuvs",
        "AR_PIXEL_FORMAT_RGB_565",
        "AR_PIXEL_FORMAT_RGBA_5551",
        "AR_PIXEL_FORMAT_RGBA_4444",
        "AR_PIXEL_FORMAT_420v",
        "AR_PIXEL_FORMAT_420f",
        "AR_PIXEL_FORMAT_NV21"
    };
    // BUGFIX: also bound-check against the table size, so that a format value
    // within [0, AR_PIXEL_FORMAT_MAX] but beyond the table (e.g. an enum value
    // added without updating this list) cannot cause an out-of-bounds read.
    if ((int)arPixelFormat < 0 || (int)arPixelFormat > AR_PIXEL_FORMAT_MAX ||
        (int)arPixelFormat >= (int)(sizeof(names) / sizeof(names[0]))) {
        ARLOGe("arUtilGetPixelFormatName: Error, unrecognised pixel format (%d).\n", (int)arPixelFormat);
        return (NULL);
    }
    return (names[(int)arPixelFormat]);
}
示例11: ar2VideoGetAbsMaxValue1394
// Query the absolute maximum value of a camera feature on an IEEE-1394
// (dc1394) video device. Only AR_VIDEO_1394_GAMMA is supported.
// Returns 0 on success with *value set, -1 on unsupported parameter or
// driver failure.
int ar2VideoGetAbsMaxValue1394(AR2VideoParam1394T *vid, int paramName, ARdouble *value)
{
    dc1394feature_t feature;
    float minVal, maxVal;

    // Map the AR parameter name onto the dc1394 feature it queries.
    if (paramName == AR_VIDEO_1394_GAMMA) {
        feature = DC1394_FEATURE_GAMMA;
    } else {
        return -1; // Unsupported parameter.
    }

    if (dc1394_feature_get_absolute_boundaries(vid->camera, feature, &minVal, &maxVal) != DC1394_SUCCESS)
    {
        ARLOGe("unable to get max value.\n");
        return -1;
    }
    // Only the maximum is reported; the minimum is discarded.
    *value = (float)maxVal;
    return 0;
}
示例12: startWMC
// Begin capture on a Windows.Media.Capture video context at the requested
// frame size, registering errorWMC as the pipeline's error callback.
// Returns true on success; false on bad arguments, double-start, or
// capture-start failure.
static bool startWMC(AR2VideoParamWinMCT *vid, const int width, const int height)
{
    // Need a valid context holding an underlying capture object.
    if (!vid || !vid->wmc)
        return false;

    // Refuse to start twice.
    if (vid->wmc->Capturing()) {
        ARLOGe("Windows.Media.Capture already started.\n");
        return false;
    }

    const bool started = vid->wmc->StartCapture(width, height,
                                                getWMCVideoMediaSubTypeForARPixelFormat(vid->format),
                                                vid->devNum - 1, vid->preferredDeviceLocation,
                                                errorWMC, (void *)vid);
    if (!started) {
        ARLOGe("Error starting capture.\n");
        return false;
    }
    return true;
}
示例13: main
int main(int argc, char *argv[])
{
ARParam cparam;
// ARParamLT *cparamLT;
float trans[3][4];
float pos[2];
float dpi[2];
// char name[1024], ext[1024];
int i, j;
float z;
init(argc, argv);
if (!cpara)
cpara = cparaDefault;
// ar2UtilDivideExt( cpara, name, ext );
// Load the camera parameters, resize for the window and init.
// if( arParamLoad(name, ext, 1, &cparam) < 0 )
if (arParamLoad(cpara, 1, &cparam) < 0)
{
ARLOGe("setupCamera(): Error loading parameter file %s for camera.\n", cpara);
exit(-1);
}
if (xsize != -1 && ysize != -1 && (cparam.xsize != xsize || cparam.ysize != ysize))
{
ARLOG("*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
arParamChangeSize(&cparam, xsize, ysize, &cparam);
}
ARLOG("*** Camera Parameter ***\n");
arParamDisp(&cparam);
// if ((cparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET)) == NULL) {
// ARLOGe("setupCamera(): Error: arParamLTCreate.\n");
// exit(-1);
// }
pos[0] = 0.0;
pos[1] = 0.0;
for (j = 0; j < 3; j++)
for (i = 0; i < 4; i++)
trans[j][i] = ((i == j) ? 1.0 : 0.0);
for (i = 10; i <= 1000; i *= 10)
{
for (j = 1; j < 10; j++)
{
z = j * i;
trans[2][3] = z;
ar2GetResolution2(&cparam, trans, pos, dpi);
ARLOG("Distance: %f [mm] --> Resolution = %10.5f, %10.5f [DPI]\n", z, dpi[0], dpi[1]);
}
}
return (0);
}
示例14: videoAndroidNativeCaptureOpen
// Create and connect a native capture context for the given camera index.
// Allocates the context, creates the CameraActivity bridge, initialises the
// lock/condition pair used by the frame pipeline, connects to the camera,
// and caches the negotiated frame dimensions.
// Returns the new context, or NULL on any failure (all partial state is
// released via the goto-cleanup ladder below).
VIDEO_ANDROID_NATIVE_CAPTURE* videoAndroidNativeCaptureOpen(int cameraIndex)
{
    CameraActivity::ErrorCode ca_err;

    ARLOGd("videoAndroidNativeCaptureOpen(%d).\n", cameraIndex);

    VIDEO_ANDROID_NATIVE_CAPTURE *nc = (VIDEO_ANDROID_NATIVE_CAPTURE*)calloc(1, sizeof(VIDEO_ANDROID_NATIVE_CAPTURE));
    if (!nc)
    {
        ARLOGe("Out of memory!\n");
        return (NULL);
    }
    nc->ca = new ARToolKitVideoAndroidCameraActivity(nc);
    // NOTE(review): plain 'new' throws std::bad_alloc rather than returning
    // NULL, so this check only fires under -fno-exceptions builds — confirm.
    if (!nc->ca)
    {
        ARLOGe("Unable to create native connection to camera.\n");
        goto bail;
    }

    // Lock manages contention between user thread, CameraActivity::onFrameBuffer thread (might be same as user thread), and frameReadyNotifierThread.
    pthread_mutex_init(&nc->frameLock, NULL);
    pthread_cond_init(&nc->frameReadyNotifierThreadCondGo, NULL);

    ca_err = nc->ca->connect(cameraIndex);
    if (ca_err != CameraActivity::NO_ERROR)
    {
        ARLOGe("Error %d opening native connection to camera.\n", ca_err);
        goto bail1;
    }

    // Cache the frame dimensions negotiated by the camera.
    nc->frameWidth = (int)nc->ca->getProperty(ANDROID_CAMERA_PROPERTY_FRAMEWIDTH);
    nc->frameHeight = (int)nc->ca->getProperty(ANDROID_CAMERA_PROPERTY_FRAMEHEIGHT);

    ARLOGd("/videoAndroidNativeCaptureOpen %dx%d.\n", nc->frameWidth, nc->frameHeight);
    return (nc);

bail1:
    // Undo everything created after the context allocation, in reverse order.
    delete(nc->ca);
    pthread_cond_destroy(&nc->frameReadyNotifierThreadCondGo);
    pthread_mutex_destroy(&nc->frameLock);
bail:
    free(nc);
    return (NULL);
}
示例15: arUtilChangeToResourcesDirectory
// Change the process working directory to a platform-appropriate resources
// directory, then optionally descend into a caller-supplied relative 'path'.
// Returns 0 on success, -1 on failure (an error will have been logged).
// N.B. The #if matching the #endif below (an Android variant of this
// signature taking a context object) lies above this excerpt.
int arUtilChangeToResourcesDirectory(AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR behavior, const char *path)
#endif
{
    char *wpath;
    AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR behaviorW;

    // Resolve the _BEST pseudo-behavior to a concrete per-platform choice.
    if (behavior == AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR_BEST) {
#if defined(__APPLE__)
        behaviorW = AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR_USE_BUNDLE_RESOURCES_DIR;
#elif defined(ANDROID)
        behaviorW = AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR_USE_APP_CACHE_DIR;
#elif defined(_WIN32) || defined(__linux)
        behaviorW = AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR_USE_EXECUTABLE_DIR;
#else
        behaviorW = AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR_USE_CWD;
#endif
    } else {
        behaviorW = behavior;
    }
    if (behaviorW != AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR_USE_SUPPLIED_PATH) {
        // NOTE(review): the unresolved 'behavior' (possibly _BEST) is passed
        // here, not 'behaviorW' — presumably arUtilGetResourcesDirectoryPath()
        // resolves _BEST itself; confirm, otherwise behaviorW is computed but
        // only used for the comparison above.
#ifdef ANDROID
        wpath = arUtilGetResourcesDirectoryPath(behavior, instanceOfAndroidContext);
#else
        wpath = arUtilGetResourcesDirectoryPath(behavior);
#endif
        if (wpath) {
            if (chdir(wpath) != 0) {
                ARLOGe("Error: Unable to change working directory to '%s'.\n", wpath);
                ARLOGperror(NULL);
                free (wpath);
                return (-1);
            }
            free(wpath);
        }
    }
    // Optionally descend into a further relative directory.
    if (path) {
        if (chdir(path) != 0) {
            ARLOGe("Error: Unable to change working directory to '%s'.\n", path);
            ARLOGperror(NULL);
            return (-1);
        }
    }
    return (0);
}