This article collects typical usage examples of the Java method android.hardware.SensorManager.getRotationMatrixFromVector. If you are wondering what SensorManager.getRotationMatrixFromVector does, how to call it, or want to see it used in context, the curated samples below may help. You can also read more about the containing class, android.hardware.SensorManager.
Below are 13 code examples of SensorManager.getRotationMatrixFromVector, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code samples.
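Before the examples, here is a minimal, self-contained sketch of the typical call sequence: feed the raw rotation vector from a TYPE_ROTATION_VECTOR event into getRotationMatrixFromVector, then into getOrientation. The class and field names below are illustrative and not taken from any of the examples that follow.
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;

public class RotationVectorListener implements SensorEventListener {
    // Length-9 array: getRotationMatrixFromVector fills it as a 3x3 rotation matrix.
    private final float[] rotationMatrix = new float[9];
    // Azimuth, pitch and roll in radians, filled by getOrientation.
    private final float[] orientation = new float[3];

    @Override
    public void onSensorChanged(SensorEvent event) {
        if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
            SensorManager.getRotationMatrixFromVector(rotationMatrix, event.values);
            SensorManager.getOrientation(rotationMatrix, orientation);
        }
    }

    @Override
    public void onAccuracyChanged(Sensor sensor, int accuracy) {
        // Not needed for this sketch.
    }
}
The listener would be registered elsewhere, for example with sensorManager.registerListener(listener, sensorManager.getDefaultSensor(Sensor.TYPE_ROTATION_VECTOR), SensorManager.SENSOR_DELAY_GAME).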
Example 1: update
import android.hardware.SensorManager; // package/class required by this method
protected void update(float[] vectors) {
    int worldAxisX = SensorManager.AXIS_X;
    int worldAxisZ = SensorManager.AXIS_Z;
    float[] rotationMatrix = new float[9];
    float[] adjustedRotationMatrix = new float[9];
    float[] orientation = new float[3];
    SensorManager.getRotationMatrixFromVector(rotationMatrix, vectors);
    SensorManager.remapCoordinateSystem(rotationMatrix, worldAxisX, worldAxisZ, adjustedRotationMatrix);
    SensorManager.getOrientation(adjustedRotationMatrix, orientation);
    float roll = orientation[2] * FROM_RADS_TO_DEGS;
    if (roll > -45 && roll < 45) {
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
        Log.d(LOG_TAG, "Requesting undefined");
    }
    Log.d(LOG_TAG, "Roll: " + roll);
}
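The constant FROM_RADS_TO_DEGS and the origin of the vectors parameter are not shown in this example. A plausible completion, assuming the values come straight from a TYPE_ROTATION_VECTOR SensorEvent (this is an assumption, not the original project's code), would be:
// Conversion factor from radians to degrees, about 57.2958.
private static final float FROM_RADS_TO_DEGS = (float) (180.0 / Math.PI);

@Override
public void onSensorChanged(SensorEvent event) {
    if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
        update(event.values); // forward the raw rotation vector to update() above
    }
}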
Example 2: onSensorChanged
import android.hardware.SensorManager; // package/class required by this method
@Override
public void onSensorChanged(SensorEvent event) {
    // We received a sensor event. It is good practice to check
    // that we received the proper event type.
    if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
        // Convert the rotation vector to a 4x4 matrix. The matrix
        // is interpreted by OpenGL as the inverse of the
        // rotation vector, which is what we want.
        SensorManager.getRotationMatrixFromVector(currentOrientationRotationMatrix.matrix, event.values);
        // Get the quaternion.
        // Calculate the angle: starting with API 18, Android provides this value as event.values[3],
        // but if it is absent we have to calculate it manually.
        SensorManager.getQuaternionFromVector(temporaryQuaternion, event.values);
        currentOrientationQuaternion.setXYZW(temporaryQuaternion[1], temporaryQuaternion[2], temporaryQuaternion[3], -temporaryQuaternion[0]);
    }
}
Example 3: calculateOrientation
import android.hardware.SensorManager; // package/class required by this method
private void calculateOrientation() {
    // If the phone doesn't have a rotation-vector sensor, calculate the orientation
    // from the accelerometer and magnetometer instead.
    if (SensorManager.getRotationMatrix(mAccMagMatrix, null, mAccel, mMagnet) && !hasRotationSensor) {
        SensorManager.getOrientation(mAccMagMatrix, mOrientation);
    } else {
        SensorManager.getRotationMatrixFromVector(mRotationMatrixFromVector, mRotation);
        SensorManager.getOrientation(mRotationMatrixFromVector, mOrientation);
    }
    // Calculate the azimuth to detect the heading.
    currentAzimuth = Math.toDegrees(mOrientation[0]);
    // Only notify other receivers if the orientation changed by at least 2.0 degrees.
    if (Math.abs(currentAzimuth - preAzimuth) >= 2.0) {
        announceChange(ANGLE_UPDATE);
        preAzimuth = currentAzimuth;
    }
}
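announceChange and the azimuth fields are defined outside this snippet. Note that Math.toDegrees(mOrientation[0]) can return negative values; if a 0-360 compass heading is needed, a small helper such as the one below could be used (the method name is illustrative, not part of the example):
// Normalize an azimuth in degrees, which may be negative, into the 0-360 compass range.
private static double toCompassHeading(double azimuthDegrees) {
    return (azimuthDegrees % 360.0 + 360.0) % 360.0;
}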
Example 4: interpretSensorEvent
import android.hardware.SensorManager; // package/class required by this method
@SuppressWarnings("SuspiciousNameCombination")
public float[] interpretSensorEvent(@NonNull Context context, @Nullable SensorEvent event) {
    if (event == null) {
        return null;
    }
    float[] rotationVector = getRotationVectorFromSensorEvent(event);
    if (!mTargeted) {
        setTargetVector(rotationVector);
        return null;
    }
    SensorManager.getRotationMatrixFromVector(mRotationMatrix, rotationVector);
    final int rotation = ((WindowManager) context
            .getSystemService(Context.WINDOW_SERVICE))
            .getDefaultDisplay()
            .getRotation();
    if (rotation == Surface.ROTATION_0) {
        SensorManager.getAngleChange(mTiltVector, mRotationMatrix, mTargetMatrix);
    } else {
        switch (rotation) {
            case Surface.ROTATION_90:
                SensorManager.remapCoordinateSystem(mRotationMatrix, AXIS_Y, AXIS_MINUS_X, mOrientedRotationMatrix);
                break;
            case Surface.ROTATION_180:
                SensorManager.remapCoordinateSystem(mRotationMatrix, AXIS_MINUS_X, AXIS_MINUS_Y, mOrientedRotationMatrix);
                break;
            case Surface.ROTATION_270:
                SensorManager.remapCoordinateSystem(mRotationMatrix, AXIS_MINUS_Y, AXIS_X, mOrientedRotationMatrix);
                break;
        }
        SensorManager.getAngleChange(mTiltVector, mOrientedRotationMatrix, mTargetMatrix);
    }
    // Normalize the tilt values to [-1, 1], scaled by the configured sensitivity.
    for (int i = 0; i < mTiltVector.length; i++) {
        mTiltVector[i] /= Math.PI;
        mTiltVector[i] *= mTiltSensitivity;
        if (mTiltVector[i] > 1) {
            mTiltVector[i] = 1f;
        } else if (mTiltVector[i] < -1) {
            mTiltVector[i] = -1f;
        }
    }
    return mTiltVector;
}
Example 5: onSensorChanged
import android.hardware.SensorManager; // package/class required by this method
@Override
public void onSensorChanged(SensorEvent event) {
    // We received a sensor event. It is good practice to check
    // that we received the proper event type.
    if (event.sensor.getType() == Sensor.TYPE_GYROSCOPE) {
        // This timestep's delta rotation is multiplied onto the current rotation
        // after computing it from the gyro sample data.
        if (timestamp != 0) {
            final float dT = (event.timestamp - timestamp) * NS2S;
            // Axis of the rotation sample, not normalized yet.
            float axisX = event.values[0];
            float axisY = event.values[1];
            float axisZ = event.values[2];
            // Calculate the angular speed of the sample.
            gyroscopeRotationVelocity = Math.sqrt(axisX * axisX + axisY * axisY + axisZ * axisZ);
            // Normalize the rotation vector if it's big enough to get the axis.
            if (gyroscopeRotationVelocity > EPSILON) {
                axisX /= gyroscopeRotationVelocity;
                axisY /= gyroscopeRotationVelocity;
                axisZ /= gyroscopeRotationVelocity;
            }
            // Integrate around this axis with the angular speed over the timestep
            // to get the delta rotation contributed by this sample.
            // We convert this axis-angle representation of the delta rotation
            // into a quaternion before turning it into a rotation matrix.
            double thetaOverTwo = gyroscopeRotationVelocity * dT / 2.0f;
            double sinThetaOverTwo = Math.sin(thetaOverTwo);
            double cosThetaOverTwo = Math.cos(thetaOverTwo);
            deltaQuaternion.setX((float) (sinThetaOverTwo * axisX));
            deltaQuaternion.setY((float) (sinThetaOverTwo * axisY));
            deltaQuaternion.setZ((float) (sinThetaOverTwo * axisZ));
            deltaQuaternion.setW(-(float) cosThetaOverTwo);
            // Matrix rendering in CubeRenderer does not seem to have this problem.
            synchronized (synchronizationToken) {
                // Move the current gyro orientation if the gyroscope should be used.
                deltaQuaternion.multiplyByQuat(currentOrientationQuaternion, currentOrientationQuaternion);
            }
            correctedQuaternion.set(currentOrientationQuaternion);
            // We inverted w in the deltaQuaternion because currentOrientationQuaternion required it.
            // Before converting it back to matrix representation, we need to revert this.
            correctedQuaternion.w(-correctedQuaternion.w());
            synchronized (synchronizationToken) {
                // Set the rotation matrix as well, so both representations are available.
                SensorManager.getRotationMatrixFromVector(currentOrientationRotationMatrix.matrix,
                        correctedQuaternion.array());
            }
        }
        timestamp = event.timestamp;
    }
}
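The constants NS2S and EPSILON and the timestamp field used above are defined elsewhere in the provider. Typical definitions, following the gyroscope-integration pattern in the Android SensorEvent documentation (treat the exact values as an assumption rather than the project's own code), look like this:
// Converts SensorEvent.timestamp deltas from nanoseconds to seconds.
private static final float NS2S = 1.0f / 1000000000.0f;
// Angular speeds below this threshold are too small to normalize the rotation axis reliably.
private static final float EPSILON = 0.000000001f;
// Timestamp of the previous gyroscope sample; 0 until the first event arrives.
private long timestamp;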
Example 6: setOrientationQuaternionAndMatrix
import android.hardware.SensorManager; // package/class required by this method
/**
 * Sets the output quaternion and matrix to the provided quaternion and synchronises the update.
 *
 * @param quaternion the quaternion to set (the result of the sensor fusion)
 */
private void setOrientationQuaternionAndMatrix(Quaternion quaternion) {
    correctedQuaternion.set(quaternion);
    // We inverted w in the deltaQuaternion because currentOrientationQuaternion required it.
    // Before converting it back to matrix representation, we need to revert this.
    correctedQuaternion.w(-correctedQuaternion.w());
    synchronized (synchronizationToken) {
        // Use the gyro only.
        currentOrientationQuaternion.copyVec4(quaternion);
        // Set the rotation matrix as well, so both representations are available.
        SensorManager.getRotationMatrixFromVector(currentOrientationRotationMatrix.matrix, correctedQuaternion.array());
    }
}
Developer: peter10110 · Project: Android-SteamVR-controller · Lines: 20 · Source: ImprovedOrientationSensor1Provider.java
Example 7: getOrientation
import android.hardware.SensorManager; // package/class required by this method
/**
 * The orientation of the device as Euler angles, in radians.
 * values[0]: azimuth, rotation around the Z axis. values[1]: pitch,
 * rotation around the X axis. values[2]: roll, rotation around the Y axis.
 */
public float[] getOrientation()
{
    if (isOrientationValidAccelMag)
    {
        // Build a structure we can pass to Android to get a rotation matrix,
        // and then an orientation vector.
        qvOrientation[0] = (float) qGyroscope.getVectorPart()[0];
        qvOrientation[1] = (float) qGyroscope.getVectorPart()[1];
        qvOrientation[2] = (float) qGyroscope.getVectorPart()[2];
        qvOrientation[3] = (float) qGyroscope.getScalarPart();
        // We need a rotation matrix so we can get the orientation vector.
        // Getting Euler angles from a quaternion is not trivial, so this is the easiest,
        // though perhaps not the fastest, way of doing this.
        SensorManager.getRotationMatrixFromVector(rmGyroscope,
                qvOrientation);
        // Get the fused orientation.
        SensorManager.getOrientation(rmGyroscope, vOrientation);
    }
    return vOrientation;
}
Example 8: calculateFusedOrientation
import android.hardware.SensorManager; // package/class required by this method
/**
 * Calculate the fused orientation.
 */
protected float[] calculateFusedOrientation(float[] gyroscope, float dt, float[] acceleration, float[] magnetic) {
    float[] baseOrientation = getBaseOrientation(acceleration, magnetic);
    if (baseOrientation != null) {
        float alpha = timeConstant / (timeConstant + dt);
        float oneMinusAlpha = (1.0f - alpha);
        Quaternion rotationVectorAccelerationMagnetic = getAccelerationMagneticRotationVector(baseOrientation);
        initializeRotationVectorGyroscopeIfRequired(rotationVectorAccelerationMagnetic);
        rotationVectorGyroscope = getGyroscopeRotationVector(rotationVectorGyroscope, gyroscope, dt);
        // Apply the complementary filter: we multiply each rotation by its
        // coefficient (scalar matrices)...
        Quaternion scaledRotationVectorAccelerationMagnetic = rotationVectorAccelerationMagnetic.multiply(oneMinusAlpha);
        // Scale our quaternion for the gyroscope.
        Quaternion scaledRotationVectorGyroscope = rotationVectorGyroscope.multiply(alpha);
        // ...and then add the two quaternions together.
        // output[0] = alpha * output[0] + (1 - alpha) * input[0];
        rotationVectorGyroscope = scaledRotationVectorGyroscope.add(scaledRotationVectorAccelerationMagnetic);
        // Now we build a structure we can pass to Android to get a rotation matrix, and then
        // an orientation vector.
        float[] fusedVector = new float[4];
        fusedVector[0] = (float) rotationVectorGyroscope.getVectorPart()[0];
        fusedVector[1] = (float) rotationVectorGyroscope.getVectorPart()[1];
        fusedVector[2] = (float) rotationVectorGyroscope.getVectorPart()[2];
        fusedVector[3] = (float) rotationVectorGyroscope.getScalarPart();
        // Rotation matrix derived from the fused quaternion.
        float[] fusedMatrix = new float[9];
        // We need a rotation matrix so we can get the orientation vector.
        // Getting Euler angles from a quaternion is not trivial, so this is the easiest,
        // though perhaps not the fastest, way of doing this.
        SensorManager.getRotationMatrixFromVector(fusedMatrix, fusedVector);
        float[] fusedOrientation = new float[3];
        // Get the fused orientation.
        SensorManager.getOrientation(fusedMatrix, fusedOrientation);
        return fusedOrientation;
    }
    // The device had a problem determining the base orientation from the acceleration and magnetic
    // sensors, possibly because of bad inputs, or because the orientation simply could not be
    // calculated, e.g. the device is in free fall.
    Log.w(tag, "Base Device Orientation could not be computed!");
    return null;
}
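All of the weighting in this filter comes from alpha = timeConstant / (timeConstant + dt), so the gyroscope dominates at short sample intervals. A quick sanity check with illustrative numbers (not taken from the project):
// With timeConstant = 0.18 s and a 10 ms sensor period, the gyroscope quaternion keeps
// roughly 95% of the weight and the accelerometer/magnetometer contributes about 5%.
float timeConstant = 0.18f;
float dt = 0.01f;
float alpha = timeConstant / (timeConstant + dt); // ~0.947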
Example 9: setTargetVector
import android.hardware.SensorManager; // package/class required by this method
protected void setTargetVector(float[] values) {
    SensorManager.getRotationMatrixFromVector(mTargetMatrix, values);
    mTargeted = true;
}
Example 10: updateOrientation
import android.hardware.SensorManager; // package/class required by this method
@SuppressWarnings("SuspiciousNameCombination")
private void updateOrientation(float[] rotationVector) {
    float[] rotationMatrix = new float[9];
    SensorManager.getRotationMatrixFromVector(rotationMatrix, rotationVector);
    final int worldAxisForDeviceAxisX;
    final int worldAxisForDeviceAxisY;
    // Remap the axes as if the device screen were the instrument panel,
    // and adjust the rotation matrix for the device orientation.
    switch (windowManager.getDefaultDisplay().getRotation()) {
        case Surface.ROTATION_90:
            worldAxisForDeviceAxisX = SensorManager.AXIS_Z;
            worldAxisForDeviceAxisY = SensorManager.AXIS_MINUS_X;
            break;
        case Surface.ROTATION_180:
            worldAxisForDeviceAxisX = SensorManager.AXIS_MINUS_X;
            worldAxisForDeviceAxisY = SensorManager.AXIS_MINUS_Z;
            break;
        case Surface.ROTATION_270:
            worldAxisForDeviceAxisX = SensorManager.AXIS_MINUS_Z;
            worldAxisForDeviceAxisY = SensorManager.AXIS_X;
            break;
        case Surface.ROTATION_0:
        default:
            worldAxisForDeviceAxisX = SensorManager.AXIS_X;
            worldAxisForDeviceAxisY = SensorManager.AXIS_Z;
            break;
    }
    float[] adjustedRotationMatrix = new float[9];
    SensorManager.remapCoordinateSystem(rotationMatrix, worldAxisForDeviceAxisX,
            worldAxisForDeviceAxisY, adjustedRotationMatrix);
    // Transform the rotation matrix into azimuth/pitch/roll.
    float[] orientation = new float[3];
    SensorManager.getOrientation(adjustedRotationMatrix, orientation);
    // The first element (the azimuth) is all we care about here.
    internalCompassListener.onCompassChanged((float) Math.toDegrees(orientation[0]));
    for (CompassListener compassListener : compassListeners) {
        compassListener.onCompassChanged((float) Math.toDegrees(orientation[0]));
    }
}
Example 11: interpretSensorEvent
import android.hardware.SensorManager; // package/class required by this method
@Nullable
@SuppressWarnings("SuspiciousNameCombination")
private float[] interpretSensorEvent(@NonNull Context context, @Nullable SensorEvent event) {
    if (event == null) {
        return null;
    }
    float[] rotationVector = getRotationVectorFromSensorEvent(event);
    if (!mTargeted) {
        setTargetVector(rotationVector);
        return null;
    }
    SensorManager.getRotationMatrixFromVector(mRotationMatrix, rotationVector);
    final int rotation =
            ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay()
                    .getRotation();
    if (rotation == Surface.ROTATION_0) {
        SensorManager.getAngleChange(mTiltVector, mRotationMatrix, mTargetMatrix);
    } else {
        switch (rotation) {
            case Surface.ROTATION_90:
                SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_Y,
                        SensorManager.AXIS_MINUS_X, mOrientedRotationMatrix);
                break;
            case Surface.ROTATION_180:
                SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_MINUS_X,
                        SensorManager.AXIS_MINUS_Y, mOrientedRotationMatrix);
                break;
            case Surface.ROTATION_270:
                SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_MINUS_Y,
                        SensorManager.AXIS_X, mOrientedRotationMatrix);
                break;
            default:
                break;
        }
        SensorManager.getAngleChange(mTiltVector, mOrientedRotationMatrix, mTargetMatrix);
    }
    // Normalize the tilt values to [-1, 1], scaled by the configured sensitivity.
    for (int i = 0; i < mTiltVector.length; i++) {
        mTiltVector[i] /= Math.PI;
        mTiltVector[i] *= mTiltSensitivity;
        if (mTiltVector[i] > 1) {
            mTiltVector[i] = 1f;
        } else if (mTiltVector[i] < -1) {
            mTiltVector[i] = -1f;
        }
    }
    return mTiltVector;
}
Example 12: setTargetVector
import android.hardware.SensorManager; // package/class required by this method
private void setTargetVector(float[] values) {
    SensorManager.getRotationMatrixFromVector(mTargetMatrix, values);
    mTargeted = true;
}
Example 13: parse
import android.hardware.SensorManager; // package/class required by this method
@Override
public double[] parse(SensorEvent event) {
    float[] sensorValues = event.values;
    float[] fixedValues = new float[4];
    // Remap the axes according to the device orientation.
    fixOrientation(sensorValues, fixedValues);
    float[] rotationMatrix = new float[9];
    // Compute the rotation matrix from the remapped rotation vector.
    SensorManager.getRotationMatrixFromVector(rotationMatrix, fixedValues);
    return parseRoatationMatrix(rotationMatrix);
}