本文整理汇总了Java中com.vuforia.CameraDevice类的典型用法代码示例。如果您正苦于以下问题:Java CameraDevice类的具体用法?Java CameraDevice怎么用?Java CameraDevice使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
CameraDevice类属于com.vuforia包,在下文中一共展示了CameraDevice类的14个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: onSingleTapUp
import com.vuforia.CameraDevice; //导入依赖的package包/类
@Override
public boolean onSingleTapUp(MotionEvent e) {
    // Schedule a one-shot autofocus trigger to fire one second after the tap.
    autofocusHandler.postDelayed(new Runnable() {
        public void run() {
            final boolean focusTriggered = CameraDevice.getInstance().setFocusMode(
                    CameraDevice.FOCUS_MODE.FOCUS_MODE_TRIGGERAUTO);
            if (!focusTriggered) {
                Log.e("SingleTapUp", "Unable to trigger focus");
            }
        }
    }, 1000L);
    // Report the tap as consumed.
    return true;
}
示例2: stopCamera
import com.vuforia.CameraDevice; //导入依赖的package包/类
/**
 * Stops the trackers, switches the flash torch off, then stops and
 * releases the camera (stop() before deinit(), matching the teardown
 * order used elsewhere in this file).
 */
private void stopCamera()
{
    mSessionControl.doStopTrackers();
    final CameraDevice camera = CameraDevice.getInstance();
    camera.setFlashTorchMode(false);
    camera.stop();
    camera.deinit();
}
示例3: setFocusMode
import com.vuforia.CameraDevice; //导入依赖的package包/类
/**
 * Applies the given Vuforia focus mode to the camera.
 *
 * @param mode one of the CameraDevice.FOCUS_MODE constants
 * @return true when the mode was applied (the method throws otherwise)
 * @throws VuforiaException if the camera rejects the requested mode
 */
private boolean setFocusMode(int mode) throws VuforiaException
{
    if (!CameraDevice.getInstance().setFocusMode(mode))
    {
        throw new VuforiaException(
            VuforiaException.SET_FOCUS_MODE_FAILURE,
            "Failed to set focus mode: " + mode);
    }
    return true;
}
示例4: processFrame
import com.vuforia.CameraDevice; //导入依赖的package包/类
/**
 * Draws the camera video background and collects this frame's trackable
 * results. As a side effect it records the last trackable's name in
 * lastTrackableName and, when at least one trackable is visible, refreshes
 * fieldOfViewRadians from the current camera calibration.
 *
 * @return the frame's trackable results, or null when the renderer is inactive
 */
public TrackableResult[] processFrame()
{
    if (!mIsActive)
        return null;
    State state = mRenderer.begin();
    mRenderer.drawVideoBackground();
    final int numResults = state.getNumTrackableResults();
    TrackableResult[] results = new TrackableResult[numResults];
    if (numResults > 0)
    {
        // Loop-invariant hoist: the calibration (and hence the field of view)
        // does not change per trackable, so compute it once per frame instead
        // of once per result. Only done when a trackable exists, preserving
        // the original behavior of leaving the field untouched otherwise.
        CameraCalibration calibration = CameraDevice.getInstance().getCameraCalibration();
        Vec2F size = calibration.getSize();
        Vec2F focalLength = calibration.getFocalLength();
        fieldOfViewRadians = (float) (2 * Math.atan(0.5f * size.getData()[0] / focalLength.getData()[0]));
    }
    for (int tIdx = 0; tIdx < numResults; tIdx++)
    {
        // Remember each trackable; lastTrackableName ends up as the final one.
        TrackableResult result = state.getTrackableResult(tIdx);
        lastTrackableName = result.getTrackable().getName();
        results[tIdx] = result;
    }
    mRenderer.end();
    return results;
}
示例5: stopCamera
import com.vuforia.CameraDevice; //导入依赖的package包/类
/**
 * Shuts the camera down if it is currently running; a no-op otherwise.
 * Stops the trackers first, then stops and releases the camera device.
 */
public void stopCamera()
{
    if (!mCameraRunning)
    {
        return;
    }
    mSessionControl.doStopTrackers();
    CameraDevice.getInstance().stop();
    CameraDevice.getInstance().deinit();
    mCameraRunning = false;
}
示例6: startAR
import com.vuforia.CameraDevice; //导入依赖的package包/类
/**
 * Opens, configures and starts the given camera, then starts the trackers
 * and enables autofocus (trigger-auto, falling back to normal focus).
 *
 * @param camera CameraDevice.CAMERA_DIRECTION constant selecting the camera
 * @throws VuforiaException if the camera cannot be opened, configured or started
 */
public void startAR(int camera) throws VuforiaException
{
    String error;
    mCamera = camera;
    if (!CameraDevice.getInstance().init(camera))
    {
        error = "Unable to open camera device: " + camera;
        Log.e(LOGTAG, error);
        throw new VuforiaException(
            VuforiaException.CAMERA_INITIALIZATION_FAILURE, error);
    }
    if (!CameraDevice.getInstance().selectVideoMode(
        CameraDevice.MODE.MODE_DEFAULT))
    {
        error = "Unable to set video mode";
        Log.e(LOGTAG, error);
        // Release the camera opened above so a later retry can init() again.
        CameraDevice.getInstance().deinit();
        throw new VuforiaException(
            VuforiaException.CAMERA_INITIALIZATION_FAILURE, error);
    }
    // Configure the video background after the video mode is selected,
    // matching the other startAR implementations in this file.
    configureVideoBackground();
    if (!CameraDevice.getInstance().start())
    {
        error = "Unable to start camera device: " + camera;
        Log.e(LOGTAG, error);
        CameraDevice.getInstance().deinit();
        throw new VuforiaException(
            VuforiaException.CAMERA_INITIALIZATION_FAILURE, error);
    }
    Vuforia.setFrameFormat(PIXEL_FORMAT.RGB565, true);
    mSessionControl.doStartTrackers();
    // Prefer trigger autofocus; fall back to normal focus if unsupported.
    try
    {
        setFocusMode(CameraDevice.FOCUS_MODE.FOCUS_MODE_TRIGGERAUTO);
    } catch (VuforiaException exceptionTriggerAuto)
    {
        setFocusMode(CameraDevice.FOCUS_MODE.FOCUS_MODE_NORMAL);
    }
}
示例7: configureVideoBackground
import com.vuforia.CameraDevice; //导入依赖的package包/类
/**
 * Sizes the Vuforia video background so it covers the screen while keeping
 * the camera frame's aspect ratio, then installs the configuration on the
 * Renderer.
 */
private void configureVideoBackground()
{
    CameraDevice cameraDevice = CameraDevice.getInstance();
    VideoMode vm = cameraDevice.getVideoMode(CameraDevice.MODE.MODE_DEFAULT);
    VideoBackgroundConfig config = new VideoBackgroundConfig();
    config.setEnabled(true);
    config.setPosition(new Vec2I(0, 0));

    final int camWidth = vm.getWidth();
    final int camHeight = vm.getHeight();
    int bgWidth;
    int bgHeight;
    if (mIsPortrait)
    {
        // Portrait: span the screen height first...
        bgWidth = (int) (camHeight * (mScreenHeight / (float) camWidth));
        bgHeight = mScreenHeight;
        if (bgWidth < mScreenWidth)
        {
            // ...unless that leaves horizontal gaps; then span the width.
            bgWidth = mScreenWidth;
            bgHeight = (int) (mScreenWidth * (camWidth / (float) camHeight));
        }
    } else
    {
        // Landscape: span the screen width first...
        bgWidth = mScreenWidth;
        bgHeight = (int) (camHeight * (mScreenWidth / (float) camWidth));
        if (bgHeight < mScreenHeight)
        {
            // ...unless that leaves vertical gaps; then span the height.
            bgWidth = (int) (mScreenHeight * (camWidth / (float) camHeight));
            bgHeight = mScreenHeight;
        }
    }
    config.setSize(new Vec2I(bgWidth, bgHeight));
    Log.i(LOGTAG, "Configure Video Background : Video (" + camWidth
        + " , " + camHeight + "), Screen (" + mScreenWidth + " , "
        + mScreenHeight + "), mSize (" + bgWidth + " , " + bgHeight + ")");
    Renderer.getInstance().setVideoBackgroundConfig(config);
}
示例8: startAR
import com.vuforia.CameraDevice; //导入依赖的package包/类
/**
 * Opens, configures and starts the given camera, then starts the trackers
 * and enables the best available focus mode (continuous, then trigger,
 * then normal).
 *
 * @param camera CameraDevice.CAMERA_DIRECTION constant selecting the camera
 * @throws ApplicationException if the camera is already running or any
 *         initialization step fails
 */
public void startAR(int camera) throws ApplicationException
{
    String error;
    if(mCameraRunning)
    {
        error = "Camera already running, unable to open again";
        Log.e(LOGTAG, error);
        throw new ApplicationException(
            ApplicationException.CAMERA_INITIALIZATION_FAILURE, error);
    }
    mCamera = camera;
    if (!CameraDevice.getInstance().init(camera))
    {
        error = "Unable to open camera device: " + camera;
        Log.e(LOGTAG, error);
        throw new ApplicationException(
            ApplicationException.CAMERA_INITIALIZATION_FAILURE, error);
    }
    if (!CameraDevice.getInstance().selectVideoMode(
        CameraDevice.MODE.MODE_DEFAULT))
    {
        error = "Unable to set video mode";
        Log.e(LOGTAG, error);
        // Release the camera opened above so a later retry can init() again.
        CameraDevice.getInstance().deinit();
        throw new ApplicationException(
            ApplicationException.CAMERA_INITIALIZATION_FAILURE, error);
    }
    // Configure the rendering of the video background
    configureVideoBackground();
    if (!CameraDevice.getInstance().start())
    {
        error = "Unable to start camera device: " + camera;
        Log.e(LOGTAG, error);
        CameraDevice.getInstance().deinit();
        throw new ApplicationException(
            ApplicationException.CAMERA_INITIALIZATION_FAILURE, error);
    }
    setProjectionMatrix();
    mSessionControl.doStartTrackers();
    mCameraRunning = true;
    // Best available focus mode: continuous, else trigger, else normal.
    if(!CameraDevice.getInstance().setFocusMode(CameraDevice.FOCUS_MODE.FOCUS_MODE_CONTINUOUSAUTO))
    {
        if(!CameraDevice.getInstance().setFocusMode(CameraDevice.FOCUS_MODE.FOCUS_MODE_TRIGGERAUTO))
            CameraDevice.getInstance().setFocusMode(CameraDevice.FOCUS_MODE.FOCUS_MODE_NORMAL);
    }
}
示例9: setProjectionMatrix
import com.vuforia.CameraDevice; //导入依赖的package包/类
/**
 * Rebuilds mProjectionMatrix from the current camera calibration, with
 * near/far clip planes at 10 and 5000 scene units.
 */
public void setProjectionMatrix()
{
    final CameraCalibration calibration =
        CameraDevice.getInstance().getCameraCalibration();
    mProjectionMatrix = Tool.getProjectionGL(calibration, 10.0f, 5000.0f);
}
示例10: configureVideoBackground
import com.vuforia.CameraDevice; //导入依赖的package包/类
/**
 * Sizes the Vuforia video background so it covers the screen while keeping
 * the camera frame's aspect ratio, derives the matching GL viewport
 * (stored in mViewport), and installs the configuration on the Renderer.
 */
private void configureVideoBackground()
{
    CameraDevice cameraDevice = CameraDevice.getInstance();
    VideoMode vm = cameraDevice.getVideoMode(CameraDevice.MODE.MODE_DEFAULT);
    VideoBackgroundConfig config = new VideoBackgroundConfig();
    config.setEnabled(true);
    config.setPosition(new Vec2I(0, 0));
    int xSize = 0, ySize = 0;
    if (mIsPortrait)
    {
        // Portrait: fill the screen height, scaling the width to the camera aspect.
        xSize = (int) (vm.getHeight() * (mScreenHeight / (float) vm
            .getWidth()));
        ySize = mScreenHeight;
        if (xSize < mScreenWidth)
        {
            // The scaled frame leaves horizontal gaps — fill the width instead.
            xSize = mScreenWidth;
            ySize = (int) (mScreenWidth * (vm.getWidth() / (float) vm
                .getHeight()));
        }
    } else
    {
        // Landscape: fill the screen width, scaling the height to the camera aspect.
        xSize = mScreenWidth;
        ySize = (int) (vm.getHeight() * (mScreenWidth / (float) vm
            .getWidth()));
        if (ySize < mScreenHeight)
        {
            // The scaled frame leaves vertical gaps — fill the height instead.
            xSize = (int) (mScreenHeight * (vm.getWidth() / (float) vm
                .getHeight()));
            ySize = mScreenHeight;
        }
    }
    config.setSize(new Vec2I(xSize, ySize));
    // The Vuforia VideoBackgroundConfig takes the position relative to the
    // centre of the screen, where as the OpenGL glViewport call takes the
    // position relative to the lower left corner
    mViewport = new int[4];
    mViewport[0] = ((mScreenWidth - xSize) / 2) + config.getPosition().getData()[0];
    mViewport[1] = ((mScreenHeight - ySize) / 2) + config.getPosition().getData()[1];
    mViewport[2] = xSize;
    mViewport[3] = ySize;
    Log.i(LOGTAG, "Configure Video Background : Video (" + vm.getWidth()
        + " , " + vm.getHeight() + "), Screen (" + mScreenWidth + " , "
        + mScreenHeight + "), mSize (" + xSize + " , " + ySize + ")");
    Renderer.getInstance().setVideoBackgroundConfig(config);
}
示例11: onInitARDone
import com.vuforia.CameraDevice; //导入依赖的package包/类
/**
 * Callback fired when Vuforia initialization completes. On success it
 * attaches the GL view, brings the UI to the front, starts the AR session
 * and requests continuous autofocus; on failure it reports the error.
 *
 * @param exception null on success, otherwise the initialization failure
 */
@Override
public void onInitARDone(ApplicationException exception)
{
    if (exception != null)
    {
        Log.e(LOGTAG, exception.getString());
        showInitializationErrorMessage(exception.getString());
        return;
    }
    initApplicationAR();
    mRenderer.mIsActive = true;
    // The OpenGL ES surface view must be attached BEFORE the camera is
    // started and the video background is configured.
    addContentView(mGlView, new LayoutParams(LayoutParams.MATCH_PARENT,
        LayoutParams.MATCH_PARENT));
    // Keep the UI layout in front of the camera preview, on a
    // transparent background.
    mUILayout.bringToFront();
    mUILayout.setBackgroundColor(Color.TRANSPARENT);
    try
    {
        vuforiaAppSession.startAR(CameraDevice.CAMERA_DIRECTION.CAMERA_DIRECTION_DEFAULT);
    } catch (ApplicationException e)
    {
        // Log and continue: the activity stays up even without the camera.
        Log.e(LOGTAG, e.getString());
    }
    if (CameraDevice.getInstance().setFocusMode(
        CameraDevice.FOCUS_MODE.FOCUS_MODE_CONTINUOUSAUTO))
    {
        mContAutofocus = true;
    } else
    {
        Log.e(LOGTAG, "Unable to enable continuous autofocus");
    }
}
示例12: onInitARDone
import com.vuforia.CameraDevice; //导入依赖的package包/类
/**
 * Callback fired when Vuforia initialization completes. On success it wires
 * up the GL view and UI, hides the loading dialog, starts the AR session,
 * requests continuous autofocus and builds the sample app menu; on failure
 * it surfaces the error to the user.
 *
 * @param exception null on success, otherwise the initialization failure
 */
@Override
public void onInitARDone(SampleApplicationException exception)
{
    if (exception == null)
    {
        initApplicationAR();
        mRenderer.mIsActive = true;
        // Now add the GL surface view. It is important
        // that the OpenGL ES surface view gets added
        // BEFORE the camera is started and video
        // background is configured.
        addContentView(mGlView, new LayoutParams(LayoutParams.MATCH_PARENT,
            LayoutParams.MATCH_PARENT));
        // Sets the UILayout to be drawn in front of the camera
        mUILayout.bringToFront();
        // Hides the Loading Dialog
        loadingDialogHandler
            .sendEmptyMessage(LoadingDialogHandler.HIDE_LOADING_DIALOG);
        // Sets the layout background to transparent
        mUILayout.setBackgroundColor(Color.TRANSPARENT);
        try
        {
            vuforiaAppSession.startAR(CameraDevice.CAMERA_DIRECTION.CAMERA_DIRECTION_DEFAULT);
        } catch (SampleApplicationException e)
        {
            // Log and continue: the activity stays up even if the camera
            // could not be started.
            Log.e(LOGTAG, e.getString());
        }
        // Continuous autofocus is best-effort; failure is only logged.
        boolean result = CameraDevice.getInstance().setFocusMode(
            CameraDevice.FOCUS_MODE.FOCUS_MODE_CONTINUOUSAUTO);
        if (!result)
            Log.e(LOGTAG, "Unable to enable continuous autofocus");
        mSampleAppMenu = new SampleAppMenu(this, this, "Video Playback",
            mGlView, mUILayout, null);
        setSampleAppMenuSettings();
        mIsInitialized = true;
    } else
    {
        Log.e(LOGTAG, exception.getString());
        showInitializationErrorMessage(exception.getString());
    }
}
示例13: startAR
import com.vuforia.CameraDevice; //导入依赖的package包/类
/**
 * Opens, configures and starts the given camera, then starts the trackers
 * and enables the best available focus mode (continuous, then trigger,
 * then normal).
 *
 * @param camera CameraDevice.CAMERA_DIRECTION constant selecting the camera
 * @throws SampleApplicationException if the camera is already running or any
 *         initialization step fails
 */
public void startAR(int camera) throws SampleApplicationException
{
    String error;
    if(mCameraRunning)
    {
        error = "Camera already running, unable to open again";
        Log.e(LOGTAG, error);
        throw new SampleApplicationException(
            SampleApplicationException.CAMERA_INITIALIZATION_FAILURE, error);
    }
    mCamera = camera;
    if (!CameraDevice.getInstance().init(camera))
    {
        error = "Unable to open camera device: " + camera;
        Log.e(LOGTAG, error);
        throw new SampleApplicationException(
            SampleApplicationException.CAMERA_INITIALIZATION_FAILURE, error);
    }
    if (!CameraDevice.getInstance().selectVideoMode(
        CameraDevice.MODE.MODE_DEFAULT))
    {
        error = "Unable to set video mode";
        Log.e(LOGTAG, error);
        // Release the camera opened above so a later retry can init() again.
        CameraDevice.getInstance().deinit();
        throw new SampleApplicationException(
            SampleApplicationException.CAMERA_INITIALIZATION_FAILURE, error);
    }
    // Configure the rendering of the video background
    configureVideoBackground();
    if (!CameraDevice.getInstance().start())
    {
        error = "Unable to start camera device: " + camera;
        Log.e(LOGTAG, error);
        CameraDevice.getInstance().deinit();
        throw new SampleApplicationException(
            SampleApplicationException.CAMERA_INITIALIZATION_FAILURE, error);
    }
    setProjectionMatrix();
    mSessionControl.doStartTrackers();
    mCameraRunning = true;
    // Best available focus mode: continuous, else trigger, else normal.
    if(!CameraDevice.getInstance().setFocusMode(CameraDevice.FOCUS_MODE.FOCUS_MODE_CONTINUOUSAUTO))
    {
        if(!CameraDevice.getInstance().setFocusMode(CameraDevice.FOCUS_MODE.FOCUS_MODE_TRIGGERAUTO))
            CameraDevice.getInstance().setFocusMode(CameraDevice.FOCUS_MODE.FOCUS_MODE_NORMAL);
    }
}
示例14: configureVideoBackground
import com.vuforia.CameraDevice; //导入依赖的package包/类
/**
 * Sizes the video background to cover the screen while preserving the
 * camera aspect ratio, computes the matching GL viewport (stored in
 * mViewport), and installs the configuration on the Renderer.
 */
private void configureVideoBackground()
{
    CameraDevice cameraDevice = CameraDevice.getInstance();
    VideoMode vm = cameraDevice.getVideoMode(CameraDevice.MODE.MODE_DEFAULT);
    VideoBackgroundConfig config = new VideoBackgroundConfig();
    config.setEnabled(true);
    config.setPosition(new Vec2I(0, 0));

    final int camWidth = vm.getWidth();
    final int camHeight = vm.getHeight();
    int bgWidth;
    int bgHeight;
    if (mIsPortrait)
    {
        // Portrait: span the screen height first...
        bgWidth = (int) (camHeight * (mScreenHeight / (float) camWidth));
        bgHeight = mScreenHeight;
        if (bgWidth < mScreenWidth)
        {
            // ...unless that leaves horizontal gaps; then span the width.
            bgWidth = mScreenWidth;
            bgHeight = (int) (mScreenWidth * (camWidth / (float) camHeight));
        }
    } else
    {
        // Landscape: span the screen width first...
        bgWidth = mScreenWidth;
        bgHeight = (int) (camHeight * (mScreenWidth / (float) camWidth));
        if (bgHeight < mScreenHeight)
        {
            // ...unless that leaves vertical gaps; then span the height.
            bgWidth = (int) (mScreenHeight * (camWidth / (float) camHeight));
            bgHeight = mScreenHeight;
        }
    }
    config.setSize(new Vec2I(bgWidth, bgHeight));

    // The config position is relative to the screen centre, whereas the GL
    // viewport origin is the lower-left corner — hence the centring offsets.
    mViewport = new int[] {
        ((mScreenWidth - bgWidth) / 2) + config.getPosition().getData()[0],
        ((mScreenHeight - bgHeight) / 2) + config.getPosition().getData()[1],
        bgWidth,
        bgHeight
    };
    Log.i(LOGTAG, "Configure Video Background : Video (" + camWidth
        + " , " + camHeight + "), Screen (" + mScreenWidth + " , "
        + mScreenHeight + "), mSize (" + bgWidth + " , " + bgHeight + ")");
    Renderer.getInstance().setVideoBackgroundConfig(config);
}