本文整理汇总了Java中org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix类的典型用法代码示例。如果您正苦于以下问题:Java OpenGLMatrix类的具体用法?Java OpenGLMatrix怎么用?Java OpenGLMatrix使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
OpenGLMatrix类属于org.firstinspires.ftc.robotcore.external.matrices包,在下文中一共展示了OpenGLMatrix类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: setTargetInfo
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; //导入依赖的package包/类
/**
* This method sets the properties of the specified target.
*
* @param index specifies the target index in the XML file.
* @param name specifies the target name.
* @param locationOnField specifies the target location on the field, can be null if no robot tracking.
* @param phoneLocationOnRobot specifies the phone location on the robot, can be null if no robot tracking.
*/
/**
 * This method sets the properties of the specified target: its name, its
 * location on the field, and the phone's location on the robot. Does nothing
 * when no target list has been loaded.
 *
 * @param index specifies the target index in the XML file.
 * @param name specifies the target name.
 * @param locationOnField specifies the target location on the field, can be null if no robot tracking.
 * @param phoneLocationOnRobot specifies the phone location on the robot, can be null if no robot tracking.
 */
public void setTargetInfo(int index, String name, OpenGLMatrix locationOnField, OpenGLMatrix phoneLocationOnRobot)
{
    if (targetList == null) {
        return;
    }

    VuforiaTrackable trackable = targetList.get(index);
    trackable.setName(name);

    if (locationOnField != null) {
        trackable.setLocation(locationOnField);
    }

    if (phoneLocationOnRobot != null) {
        VuforiaTrackableDefaultListener trackableListener =
                (VuforiaTrackableDefaultListener) trackable.getListener();
        trackableListener.setPhoneInformation(phoneLocationOnRobot, cameraDir);
    }
}
示例2: getVuMarkPosition
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; //导入依赖的package包/类
/**
 * Returns the translation component of the first target's pose when a
 * recognized VuMark is currently visible, or null otherwise. Also traces the
 * position (converted to inches, z negated) through the robot tracer.
 */
public VectorF getVuMarkPosition()
{
    VuforiaTrackable target = vuforia.getTarget(0);
    RelicRecoveryVuMark vuMark = RelicRecoveryVuMark.from(target);

    // Bail out unless the target is visible and identified.
    if (!vuforia.isTargetVisible(target) || vuMark == RelicRecoveryVuMark.UNKNOWN) {
        return null;
    }

    OpenGLMatrix pose = vuforia.getTargetPose(target);
    if (pose == null) {
        return null;
    }

    VectorF targetPos = pose.getTranslation();
    robot.tracer.traceInfo("TargetPos", "%s: x=%6.2f, y=%6.2f, z=%6.2f",
            vuMark.toString(),
            targetPos.get(0)/RobotInfo.MM_PER_INCH,
            targetPos.get(1)/RobotInfo.MM_PER_INCH,
            -targetPos.get(2)/RobotInfo.MM_PER_INCH);
    return targetPos;
}
示例3: getVuMarkOrientation
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; //导入依赖的package包/类
/**
 * Returns the extrinsic XYZ orientation (degrees) of the first target's pose
 * when a recognized VuMark is currently visible, or null otherwise. Also
 * traces the rotation angles through the robot tracer.
 */
public Orientation getVuMarkOrientation()
{
    VuforiaTrackable target = vuforia.getTarget(0);
    RelicRecoveryVuMark vuMark = RelicRecoveryVuMark.from(target);

    // Bail out unless the target is visible and identified.
    if (!vuforia.isTargetVisible(target) || vuMark == RelicRecoveryVuMark.UNKNOWN) {
        return null;
    }

    OpenGLMatrix pose = vuforia.getTargetPose(target);
    if (pose == null) {
        return null;
    }

    Orientation targetAngle = Orientation.getOrientation(
            pose, AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES);
    robot.tracer.traceInfo("TargetRot", "%s: xRot=%6.2f, yRot=%6.2f, zRot=%6.2f",
            vuMark.toString(),
            targetAngle.firstAngle, targetAngle.secondAngle, targetAngle.thirdAngle);
    return targetAngle;
}
示例4: updateLocation
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; //导入依赖的package包/类
/**
 * Polls the VuMark listener for the latest pose, reports it on telemetry,
 * and — when a pose is available — stores its translation in tX/tY/tZ and its
 * extrinsic XYZ rotation (degrees) in rX/rY/rZ.
 */
public void updateLocation(){
    OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) relicVuMark.getListener()).getPose();
    telemetry.addData("Pose", format(pose));

    if (pose == null) {
        return;
    }

    // Decompose the pose into its translational and rotational components.
    VectorF translation = pose.getTranslation();
    Orientation rotation =
            Orientation.getOrientation(pose, AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES);

    // Offset of the target relative to the robot: z is forward-backward, x is sideways.
    tX = translation.get(0);
    tY = translation.get(1);
    tZ = translation.get(2);

    rX = rotation.firstAngle;
    rY = rotation.secondAngle;
    rZ = rotation.thirdAngle;
}
示例5: getVuMarkPosition
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; //导入依赖的package包/类
/**
 * Returns the translation component of the first target's pose when a
 * recognized VuMark is currently visible, or null otherwise.
 */
private VectorF getVuMarkPosition()
{
    VuforiaTrackable target = vuforia.getTarget(0);
    RelicRecoveryVuMark vuMark = RelicRecoveryVuMark.from(target);

    // Only report a position for a visible, identified VuMark.
    if (!vuforia.isTargetVisible(target) || vuMark == RelicRecoveryVuMark.UNKNOWN) {
        return null;
    }

    OpenGLMatrix pose = vuforia.getTargetPose(target);
    return (pose != null) ? pose.getTranslation() : null;
}
示例6: getVuMarkOrientation
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; //导入依赖的package包/类
/**
 * Returns the extrinsic XYZ orientation (degrees) of the first target's pose
 * when a recognized VuMark is currently visible, or null otherwise.
 */
private Orientation getVuMarkOrientation()
{
    VuforiaTrackable target = vuforia.getTarget(0);
    RelicRecoveryVuMark vuMark = RelicRecoveryVuMark.from(target);

    // Only report an orientation for a visible, identified VuMark.
    if (!vuforia.isTargetVisible(target) || vuMark == RelicRecoveryVuMark.UNKNOWN) {
        return null;
    }

    OpenGLMatrix pose = vuforia.getTargetPose(target);
    if (pose == null) {
        return null;
    }
    return Orientation.getOrientation(
            pose, AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES);
}
示例7: setTargets
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; //导入依赖的package包/类
/**
* This method sets tracking info for the targets described in the given target array.
*
* @param targets specifies the array of targets to set tracking info.
* @param phoneLocationOnRobot specifies the location marix of the phone on the robot.
*/
public void setTargets(Target[] targets, OpenGLMatrix phoneLocationOnRobot)
{
for (int i = 0; i < targets.length; i++)
{
OpenGLMatrix targetLocationOnField =
phoneLocationOnRobot == null?
null:
locationMatrix(
targets[i].rotateX, targets[i].rotateY, targets[i].rotateZ,
targets[i].translateX, targets[i].translateY, targets[i].translateZ);
setTargetInfo(i, targets[i].name, targetLocationOnField, phoneLocationOnRobot);
}
}
示例8: runOpMode
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; //导入依赖的package包/类
/**
 * OpMode main loop: sets up Vuforia, then continuously tracks the robot's
 * last known location and the currently visible VuMark, reporting both on
 * telemetry every cycle.
 */
public void runOpMode() throws InterruptedException
{
    setupVuforia();
    lastKnownLocation = createMatrix(0, 0, 0, 0, 0, 0);

    waitForStart();
    visionTargets.activate();

    while (opModeIsActive())
    {
        OpenGLMatrix latestLocation = listener.getUpdatedRobotLocation();
        vuMark = RelicRecoveryVuMark.from(relicVuMark);

        // Only overwrite the last known location with a real fix.
        if (latestLocation != null)
        {
            lastKnownLocation = latestLocation;
        }

        float[] xyz = lastKnownLocation.getTranslation().getData();
        robotX = xyz[0];
        robotY = xyz[1];
        robotAngle = Orientation.getOrientation(
                lastKnownLocation, AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES).thirdAngle;

        RelicRecoveryVuMark key = vuMark;
        if (vuMark == RelicRecoveryVuMark.UNKNOWN)
        {
            telemetry.addData("Navi", "not visible");
            telemetry.addData("Navi Sees:", vuMark);
        }
        else
        {
            telemetry.addData("Navi", "%s visible", vuMark);
        }
        telemetry.addData("Last Known Location", formatMatrix(lastKnownLocation));
        telemetry.addData("key", key.toString());
        telemetry.update();
    }
}
示例9: runOpMode
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; //导入依赖的package包/类
/**
 * OpMode main loop: sets up Vuforia, continuously tracks the robot's last
 * known location, and records the most recent identified VuMark in the
 * key field.
 */
public void runOpMode() throws InterruptedException
{
    setupVuforia();
    lastKnownLocation = createMatrix(0, 0, 0, 0, 0, 0);

    waitForStart();
    visionTargets.activate();

    while (opModeIsActive())
    {
        OpenGLMatrix latestLocation = listener.getUpdatedRobotLocation();
        vuMark = RelicRecoveryVuMark.from(relicVuMark);

        // Only overwrite the last known location with a real fix.
        if (latestLocation != null)
        {
            lastKnownLocation = latestLocation;
        }

        float[] xyz = lastKnownLocation.getTranslation().getData();
        robotX = xyz[0];
        robotY = xyz[1];
        robotAngle = Orientation.getOrientation(
                lastKnownLocation, AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES).thirdAngle;

        // Remember the last VuMark that was actually identified.
        if (vuMark != RelicRecoveryVuMark.UNKNOWN)
        {
            key = vuMark;
        }
    }
}
示例10: getBeaconConfig
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; //导入依赖的package包/类
/**
 * Determines the beacon's colour configuration from a camera frame and the
 * beacon's raw Vuforia pose: projects the beacon's four corners into the
 * image, crops that region, masks the beacon-blue pixels in HSV space, and
 * uses the mask's centroid to decide which side the blue half is on.
 *
 * NOTE(review): the corner coordinates (±127, 276/-92) appear to be
 * beacon-local offsets — presumably millimetres; confirm against the beacon
 * target definition.
 *
 * @param img the current camera frame from Vuforia.
 * @param beacon the listener holding the beacon's tracked pose.
 * @param camCal the camera calibration used to project 3D points to pixels.
 * @return BEACON_RED_BLUE or BEACON_BLUE_RED, or NOT_VISIBLE when the pose
 *         or image is unavailable.
 */
public static int getBeaconConfig(Image img, VuforiaTrackableDefaultListener beacon, CameraCalibration camCal) {
    OpenGLMatrix pose = beacon.getRawPose();
    if (pose != null && img != null && img.getPixels() != null) {
        // Vuforia's projection tools take a 3x4 pose: first 12 values of the transpose.
        Matrix34F rawPose = new Matrix34F();
        float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
        rawPose.setData(poseData);
        // Project the beacon's four corners from beacon-local 3D into image pixel coordinates.
        float[][] corners = new float[4][2];
        corners[0] = Tool.projectPoint(camCal, rawPose, new Vec3F(-127, 276, 0)).getData(); //upper left of beacon
        corners[1] = Tool.projectPoint(camCal, rawPose, new Vec3F(127, 276, 0)).getData(); //upper right of beacon
        corners[2] = Tool.projectPoint(camCal, rawPose, new Vec3F(127, -92, 0)).getData(); //lower right of beacon
        corners[3] = Tool.projectPoint(camCal, rawPose, new Vec3F(-127, -92, 0)).getData(); //lower left of beacon
        //getting camera image...
        Bitmap bm = Bitmap.createBitmap(img.getWidth(), img.getHeight(), Bitmap.Config.RGB_565);
        bm.copyPixelsFromBuffer(img.getPixels());
        //turning the corner pixel coordinates into a proper bounding box
        Mat crop = bitmapToMat(bm, CvType.CV_8UC3);
        float x = Math.min(Math.min(corners[1][0], corners[3][0]), Math.min(corners[0][0], corners[2][0]));
        float y = Math.min(Math.min(corners[1][1], corners[3][1]), Math.min(corners[0][1], corners[2][1]));
        float width = Math.max(Math.abs(corners[0][0] - corners[2][0]), Math.abs(corners[1][0] - corners[3][0]));
        float height = Math.max(Math.abs(corners[0][1] - corners[2][1]), Math.abs(corners[1][1] - corners[3][1]));
        //make sure our bounding box doesn't go outside of the image
        //OpenCV doesn't like that...
        x = Math.max(x, 0);
        y = Math.max(y, 0);
        width = (x + width > crop.cols())? crop.cols() - x : width;
        height = (y + height > crop.rows())? crop.rows() - y : height;
        //cropping bounding box out of camera image
        final Mat cropped = new Mat(crop, new Rect((int) x, (int) y, (int) width, (int) height));
        //filtering out non-beacon-blue colours in HSV colour space
        Imgproc.cvtColor(cropped, cropped, Imgproc.COLOR_RGB2HSV_FULL);
        //get filtered mask
        //if pixel is within acceptable blue-beacon-colour range, it's changed to white.
        //Otherwise, it's turned to black
        Mat mask = new Mat();
        Core.inRange(cropped, BEACON_BLUE_LOW, BEACON_BLUE_HIGH, mask);
        Moments mmnts = Imgproc.moments(mask, true);
        //calculating centroid of the resulting binary mask via image moments
        Log.i("CentroidX", "" + ((mmnts.get_m10() / mmnts.get_m00())));
        Log.i("CentroidY", "" + ((mmnts.get_m01() / mmnts.get_m00())));
        //checking if blue either takes up the majority of the image (which means the beacon is all blue)
        //or if there's barely any blue in the image (which means the beacon is all red or off)
        // if (mmnts.get_m00() / mask.total() > 0.8) {
        // return VortexUtils.BEACON_ALL_BLUE;
        // } else if (mmnts.get_m00() / mask.total() < 0.1) {
        // return VortexUtils.BEACON_NO_BLUE;
        // }//elseif
        //Note: for some reason, we end up with a image that is rotated 90 degrees
        //if centroid is in the bottom half of the image, the blue beacon is on the left
        //if the centroid is in the top half, the blue beacon is on the right
        if ((mmnts.get_m01() / mmnts.get_m00()) < cropped.rows() / 2) {
            return BEACON_RED_BLUE;
        } else {
            return BEACON_BLUE_RED;
        }
    }
    return NOT_VISIBLE;
}
示例11: format
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; //导入依赖的package包/类
/**
 * Formats a transformation matrix for telemetry display; returns the literal
 * string "null" when the matrix is null.
 */
String format(OpenGLMatrix transformationMatrix) {
    if (transformationMatrix == null) {
        return "null";
    }
    return transformationMatrix.formatAsTransform();
}
示例12: createMatrix
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; //导入依赖的package包/类
/**
 * Builds a transformation matrix consisting of a translation by (x, y, z)
 * composed with an extrinsic XYZ rotation of u, v, w degrees.
 */
public OpenGLMatrix createMatrix(float x,float y,float z,float u,float v,float w)
{
    OpenGLMatrix translation = OpenGLMatrix.translation(x, y, z);
    return translation.multiplied(Orientation.getRotationMatrix(
            AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES, u, v, w));
}
示例13: formatMatrix
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; //导入依赖的package包/类
/**
 * Formats a transformation matrix as a human-readable transform string.
 *
 * Fixed: the original threw a NullPointerException for a null matrix; now
 * returns the literal string "null" instead, consistent with the format()
 * helper used elsewhere in this codebase.
 *
 * @param matrix the transformation matrix to format; may be null.
 * @return the formatted transform, or "null" when matrix is null.
 */
public String formatMatrix(OpenGLMatrix matrix)
{
    return (matrix != null) ? matrix.formatAsTransform() : "null";
}
示例14: createMatrix
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; //导入依赖的package包/类
/**
 * Builds a transformation matrix: a translation by (x, y, z) followed by an
 * extrinsic XYZ rotation of u, v, w degrees.
 */
public OpenGLMatrix createMatrix(float x, float y, float z, float u, float v, float w)
{
    OpenGLMatrix rotation = Orientation.getRotationMatrix(
            AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES, u, v, w);
    return OpenGLMatrix.translation(x, y, z).multiplied(rotation);
}
示例15: formatMatrix
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; //导入依赖的package包/类
/**
 * Formats a transformation matrix as a human-readable transform string.
 *
 * Fixed: the original threw a NullPointerException for a null matrix; now
 * returns the literal string "null" instead, consistent with the format()
 * helper used elsewhere in this codebase.
 *
 * @param matrix the transformation matrix to format; may be null.
 * @return the formatted transform, or "null" when matrix is null.
 */
public String formatMatrix(OpenGLMatrix matrix)
{
    return (matrix != null) ? matrix.formatAsTransform() : "null";
}