

Java AngleUnit Class Code Examples

This article collects typical usage examples of the Java class org.firstinspires.ftc.robotcore.external.navigation.AngleUnit. If you have been wondering what the AngleUnit class is for, how to use it, or where to find real-world examples of it, the selected class examples below should help.


The AngleUnit class belongs to the org.firstinspires.ftc.robotcore.external.navigation package. Fifteen code examples of the class are presented below, sorted by popularity by default.
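
Before the examples, here is a minimal, hedged sketch of the two AngleUnit calls that recur below, fromUnit and normalize (both also appear in Examples 13 and 14). The class name AngleUnitSketch and its main method are illustrative only and are not part of the FTC SDK:

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; // import the required package/class

public class AngleUnitSketch {
    public static void main(String[] args) {
        // Convert an angle expressed in radians into degrees (pi radians -> 180 degrees).
        double degrees = AngleUnit.DEGREES.fromUnit(AngleUnit.RADIANS, Math.PI);

        // Wrap an angle back into the unit's canonical range (roughly -180..180 for degrees),
        // which is what Example 14 relies on when formatting IMU angles.
        double normalized = AngleUnit.DEGREES.normalize(degrees + 360.0);

        System.out.printf("degrees=%.1f, normalized=%.1f%n", degrees, normalized);
    }
}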

Example 1: getVuMarkOrientation

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; // import the required package/class
public Orientation getVuMarkOrientation()
{
    Orientation targetAngle = null;
    VuforiaTrackable target = vuforia.getTarget(0);
    RelicRecoveryVuMark vuMark = RelicRecoveryVuMark.from(target);

    if (vuforia.isTargetVisible(target) && vuMark != RelicRecoveryVuMark.UNKNOWN)
    {
        OpenGLMatrix pose = vuforia.getTargetPose(target);
        if (pose != null)
        {
            targetAngle = Orientation.getOrientation(
                    pose, AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES);
            robot.tracer.traceInfo("TargetRot", "%s: xRot=%6.2f, yRot=%6.2f, zRot=%6.2f",
                    vuMark.toString(),
                    targetAngle.firstAngle, targetAngle.secondAngle, targetAngle.thirdAngle);
        }
    }

    return targetAngle;
}
 
Developer: trc492, Project: Ftc2018RelicRecovery, Lines of code: 22, Source file: VuforiaVision.java

Example 2: updateLocation

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; // import the required package/class
public void updateLocation(){
    OpenGLMatrix pose = ((VuforiaTrackableDefaultListener)relicVuMark.getListener()).getPose();
    telemetry.addData("Pose", format(pose));

    /* We further illustrate how to decompose the pose into useful rotational and
     * translational components */
    if (pose != null) {
        VectorF trans = pose.getTranslation();
        Orientation rot = Orientation.getOrientation(pose, AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES);

        // Extract the X, Y, and Z components of the offset of the target relative to the robot
        tX = trans.get(0);
        tY = trans.get(1);
        tZ = trans.get(2);

        // Extract the rotational components of the target relative to the robot
        rX = rot.firstAngle;
        rY = rot.secondAngle;
        rZ = rot.thirdAngle;
        // Z is forward-backward
        // X is sideways
    }
}
 
Developer: SCHS-Robotics, Project: Team9261-2017-2018, Lines of code: 24, Source file: MecanumDebug.java

Example 3: getVuMarkOrientation

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; // import the required package/class
private Orientation getVuMarkOrientation()
{
    Orientation targetAngle = null;
    VuforiaTrackable target = vuforia.getTarget(0);
    RelicRecoveryVuMark vuMark = RelicRecoveryVuMark.from(target);

    if (vuforia.isTargetVisible(target) && vuMark != RelicRecoveryVuMark.UNKNOWN)
    {
        OpenGLMatrix pose = vuforia.getTargetPose(target);
        if (pose != null)
        {
            targetAngle = Orientation.getOrientation(
                    pose, AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES);
        }
    }

    return targetAngle;
}
 
Developer: trc492, Project: FtcSamples, Lines of code: 19, Source file: FtcTestVuMark.java

Example 4: runOpMode

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; // import the required package/class
@Override public void runOpMode() {

        // Set up the parameters with which we will use our IMU. Note that integration
        // algorithm here just reports accelerations to the logcat log; it doesn't actually
        // provide positional information.
        BNO055IMU.Parameters parameters = new BNO055IMU.Parameters();
        parameters.angleUnit           = BNO055IMU.AngleUnit.DEGREES;
        parameters.accelUnit           = BNO055IMU.AccelUnit.METERS_PERSEC_PERSEC;
        parameters.calibrationDataFile = "AdafruitIMUCalibration.json"; // see the calibration sample opmode
        parameters.loggingEnabled      = true;
        parameters.loggingTag          = "IMU";
        parameters.accelerationIntegrationAlgorithm = new JustLoggingAccelerationIntegrator();

        // Retrieve and initialize the IMU. We expect the IMU to be attached to an I2C port
        // on a Core Device Interface Module, configured to be a sensor of type "AdaFruit IMU",
        // and named "imu".
        imu = hardwareMap.get(BNO055IMU.class, "imu");
        imu.initialize(parameters);

        // Set up our telemetry dashboard
        composeTelemetry();

        // Wait until we're told to go
        waitForStart();

        // Start the logging of measured acceleration
        imu.startAccelerationIntegration(new Position(), new Velocity(), 1000);

        // Loop and update the dashboard
        while (opModeIsActive()) {
            telemetry.update();
        }
    }
 
Developer: ykarim, Project: FTC2016, Lines of code: 34, Source file: SensorAdafruitIMU.java

Example 5: runOpMode

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; // import the required package/class
@Override public void runOpMode() {

        // Set up the parameters with which we will use our IMU. Note that integration
        // algorithm here just reports accelerations to the logcat log; it doesn't actually
        // provide positional information.
        BNO055IMU.Parameters parameters = new BNO055IMU.Parameters();
        parameters.angleUnit           = BNO055IMU.AngleUnit.DEGREES;
        parameters.accelUnit           = BNO055IMU.AccelUnit.METERS_PERSEC_PERSEC;
        parameters.calibrationDataFile = "BNO055IMUCalibration.json"; // see the calibration sample opmode
        parameters.loggingEnabled      = true;
        parameters.loggingTag          = "IMU";
        parameters.accelerationIntegrationAlgorithm = new JustLoggingAccelerationIntegrator();

        // Retrieve and initialize the IMU. We expect the IMU to be attached to an I2C port
        // on a Core Device Interface Module, configured to be a sensor of type "AdaFruit IMU",
        // and named "imu".
        imu = hardwareMap.get(BNO055IMU.class, "imu");
        imu.initialize(parameters);

        // Set up our telemetry dashboard
        composeTelemetry();

        // Wait until we're told to go
        waitForStart();

        // Start the logging of measured acceleration
        imu.startAccelerationIntegration(new Position(), new Velocity(), 1000);

        // Loop and update the dashboard
        while (opModeIsActive()) {
            telemetry.update();
        }
    }
 
Developer: trc492, Project: Ftc2018RelicRecovery, Lines of code: 34, Source file: SensorBNO055IMU.java

Example 6: runOpMode

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; // import the required package/class
@Override public void runOpMode() throws InterruptedException {

        // Get a reference to the gyroscope from the hardware map
        gyroscope = hardwareMap.get(Gyroscope.class, "gyro");

        // Get a reference to the *implementation* of the gyroscope on the HiTechnic sensor.
        // Usually, you won't need to examine internal implementation details in this way, but
        // we do so to illustrate aspects of what is going on inside the sensor.
        hiTechnicNxtGyroSensor = hardwareMap.get(HiTechnicNxtGyroSensor.class, "gyro");
        // Alternately, we could have cast: hiTechnicNxtGyroSensor = (HiTechnicNxtGyroSensor)gyro;

        // Optionally, calibrate the gyro to establish a good value for its "zero deg/s" bias
        // voltage value. Calibration is not entirely necessary, as the default bias voltage
        // usually does a reasonable job. While calibration is performed, the gyro must remain
        // motionless. Note that for this gyro sensor, calibration data is not persistently
        // written to EEPROM, but rather should be performed each run.
        telemetry.log().add("calibrating...");
        hiTechnicNxtGyroSensor.calibrate(3000, 100);
        telemetry.log().add("...done...waiting for start...");

        // wait for the start button to be pressed.
        waitForStart();
        telemetry.log().clear();

        // loop until the opmode has been asked to stop
        while (opModeIsActive()) {

            double raw = hiTechnicNxtGyroSensor.readRawVoltage();
            double bias = hiTechnicNxtGyroSensor.getBiasVoltage();

            telemetry.addData("rate", "%.4f deg/s",      gyroscope.getAngularVelocity(AngleUnit.DEGREES).zRotationRate);
            telemetry.addData("raw ", "%.4fv",           raw);
            telemetry.addData("bias", "%.4fv",           bias);
            telemetry.addData("volts", "%.4fv",          raw-bias);
            telemetry.addData("deg/s/v", "%.4f deg/s/v", hiTechnicNxtGyroSensor.getDefaultDegreesPerSecondPerVolt());

            telemetry.update();
            idle();
        }
    }
 
Developer: trc492, Project: Ftc2018RelicRecovery, Lines of code: 41, Source file: SensorHTGyro.java

Example 7: runOpMode

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; // import the required package/class
public void runOpMode() throws InterruptedException
{
    setupVuforia();

    lastKnownLocation = createMatrix(0, 0, 0, 0, 0, 0);

    waitForStart();

    visionTargets.activate();
    while (opModeIsActive())
    {
        OpenGLMatrix latestLocation = listener.getUpdatedRobotLocation();
        vuMark = RelicRecoveryVuMark.from(relicVuMark);

        if (latestLocation != null)
        {
            lastKnownLocation = latestLocation;
        }

        float[] coordinates = lastKnownLocation.getTranslation().getData();

        robotX = coordinates[0];
        robotY = coordinates[1];
        robotAngle = Orientation.getOrientation(lastKnownLocation, AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES).thirdAngle;

        RelicRecoveryVuMark key = vuMark;
        if (vuMark != RelicRecoveryVuMark.UNKNOWN) {
            telemetry.addData("Navi", "%s visible", vuMark);
        } else {
            telemetry.addData("Navi", "not visible");
            telemetry.addData("Navi Sees:", vuMark);
        }
        telemetry.addData("Last Known Location", formatMatrix(lastKnownLocation));
        telemetry.addData("key", key.toString());
        telemetry.update();
    }
}
 
Developer: SCHS-Robotics, Project: Team9261-2017-2018, Lines of code: 35, Source file: Vuforia.java

Example 8: init

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; // import the required package/class
@Override
public void init() throws InterruptedException {
    motor1 = robot.hardwareMap.dcMotor.get("motor1");
    motor2 = robot.hardwareMap.dcMotor.get("motor2");
    motor3 = robot.hardwareMap.dcMotor.get("motor3");
    motor4 = robot.hardwareMap.dcMotor.get("motor4");

    motor1.setDirection(DcMotor.Direction.FORWARD);
    motor2.setDirection(DcMotor.Direction.REVERSE);
    motor3.setDirection(DcMotor.Direction.FORWARD);
    motor4.setDirection(DcMotor.Direction.REVERSE);

    modeEncoders();
    BNO055IMU.Parameters parameters = new BNO055IMU.Parameters();

    parameters.mode                = BNO055IMU.SensorMode.IMU;
    parameters.angleUnit           = BNO055IMU.AngleUnit.DEGREES;
    parameters.accelUnit           = BNO055IMU.AccelUnit.METERS_PERSEC_PERSEC;
    parameters.loggingEnabled      = false;

    imu = robot.hardwareMap.get(BNO055IMU.class, "imu");

    imu.initialize(parameters);

    robot.telemetry.addData("Mode", "calibrating...");
    robot.telemetry.update();

    while(!imu.isGyroCalibrated()){
        sleep(50);
    }

    robot.telemetry.addData("Mode", "waiting for start");
    robot.telemetry.addData("imu calib status", imu.getCalibrationStatus().toString());
    robot.telemetry.addData("Angle", getAngle());
    robot.telemetry.update();

}
 
Developer: SCHS-Robotics, Project: Team9261-2017-2018, Lines of code: 38, Source file: Drive.java

Example 9: runOpMode

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; // import the required package/class
public void runOpMode() throws InterruptedException
{
    setupVuforia();

    lastKnownLocation = createMatrix(0,0,0,0,0,0);

    waitForStart();

    visionTargets.activate();
    while(opModeIsActive())
    {
        OpenGLMatrix latestLocation = listener.getUpdatedRobotLocation();
        vuMark = RelicRecoveryVuMark.from(relicVuMark);

        if (latestLocation != null)
        {
            lastKnownLocation = latestLocation;
        }

        float[] coordinates = lastKnownLocation.getTranslation().getData();

        robotX = coordinates[0];
        robotY = coordinates[1];
        robotAngle = Orientation.getOrientation(lastKnownLocation, AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES).thirdAngle;
        if (vuMark != RelicRecoveryVuMark.UNKNOWN) {
            key = vuMark;
        }
    }
}
 
Developer: SCHS-Robotics, Project: Team9261-2017-2018, Lines of code: 28, Source file: BasicVuforia.java

Example 10: runOpMode

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; // import the required package/class
@Override public void runOpMode() throws InterruptedException {

        // Set up the parameters with which we will use our IMU. Note that integration
        // algorithm here just reports accelerations to the logcat log; it doesn't actually
        // provide positional information.
        BNO055IMU.Parameters parameters = new BNO055IMU.Parameters();
        parameters.angleUnit           = BNO055IMU.AngleUnit.DEGREES;
        parameters.accelUnit           = BNO055IMU.AccelUnit.METERS_PERSEC_PERSEC;
        parameters.calibrationDataFile = "AdafruitIMUCalibration.json"; // see the calibration sample opmode
        parameters.loggingEnabled      = true;
        parameters.loggingTag          = "IMU";
        parameters.accelerationIntegrationAlgorithm = new JustLoggingAccelerationIntegrator();

        // Retrieve and initialize the IMU. We expect the IMU to be attached to an I2C port
        // on a Core Device Interface Module, configured to be a sensor of type "AdaFruit IMU",
        // and named "imu".
        imu = hardwareMap.get(BNO055IMU.class, "imu");
        imu.initialize(parameters);

        // Set up our telemetry dashboard
        composeTelemetry();

        // Wait until we're told to go
        waitForStart();

        // Start the logging of measured acceleration
        imu.startAccelerationIntegration(new Position(), new Velocity(), 1000);

        // Loop and update the dashboard
        while (opModeIsActive()) {
            telemetry.update();
            idle();
        }
    }
 
Developer: forgod01, Project: 5094-2016-2017, Lines of code: 35, Source file: SensorAdafruitIMU.java

Example 11: runPeriodic

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; // import the required package/class
@Override
public void runPeriodic(double elapsedTime)
{
    dashboard.displayPrintf(1, "Angle:x=%6.1f,y=%6.1f,z=%6.1f",
            imu.imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES).firstAngle,
            imu.imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES).secondAngle,
            imu.imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES).thirdAngle);
    dashboard.displayPrintf(2, "Heading: x=%6.1f,y=%6.1f,z=%6.1f",
            imu.gyro.getXHeading().value,
            imu.gyro.getYHeading().value,
            imu.gyro.getZHeading().value);
    dashboard.displayPrintf(3, "TurnRate: x=%6.1f,y=%6.1f,z=%6.1f",
            imu.gyro.getXRotationRate().value,
            imu.gyro.getYRotationRate().value,
            imu.gyro.getZRotationRate().value);
    dashboard.displayPrintf(4, "Accel: x=%6.1f,y=%6.1f,z=%6.1f",
            imu.accel.getXAcceleration().value,
            imu.accel.getYAcceleration().value,
            imu.accel.getZAcceleration().value);
    dashboard.displayPrintf(5, "Vel: x=%6.1f,y=%6.1f,z=%6.1f",
            imu.accel.getXVelocity().value,
            imu.accel.getYVelocity().value,
            imu.accel.getZVelocity().value);
    dashboard.displayPrintf(6, "Dist: x=%6.1f,y=%6.1f,z=%6.1f",
            imu.accel.getXDistance().value,
            imu.accel.getYDistance().value,
            imu.accel.getZDistance().value);
    dashboard.displayPrintf(7, "Touch=%s", touchSensor.isActive());
    dashboard.displayPrintf(8, "Color=%x,rgb=%f/%f/%f",
            colorSensor.getRawData(0, FtcColorSensor.DataType.COLOR_NUMBER).value.intValue(),
            colorSensor.getRawData(0, FtcColorSensor.DataType.RED).value,
            colorSensor.getRawData(0, FtcColorSensor.DataType.GREEN).value,
            colorSensor.getRawData(0, FtcColorSensor.DataType.BLUE).value);
    dashboard.displayPrintf(9, "HSV=%f/%f/%f",
            colorSensor.getRawData(0, FtcColorSensor.DataType.HUE).value,
            colorSensor.getRawData(0, FtcColorSensor.DataType.SATURATION).value,
            colorSensor.getRawData(0, FtcColorSensor.DataType.VALUE).value);
    dashboard.displayPrintf(10, "Range=%f",
            rangeSensor.getRawData(0, FtcDistanceSensor.DataType.DISTANCE_INCH).value);
}
 
Developer: trc492, Project: FtcSamples, Lines of code: 41, Source file: FtcTestRevHub.java

Example 12: runOpMode

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; // import the required package/class
@Override public void runOpMode() throws InterruptedException {

        // Set up the parameters with which we will use our IMU. Note that integration
        // algorithm here just reports accelerations to the logcat log; it doesn't actually
        // provide positional information.
        BNO055IMU.Parameters parameters = new BNO055IMU.Parameters();
        parameters.angleUnit           = BNO055IMU.AngleUnit.DEGREES;
        parameters.accelUnit           = BNO055IMU.AccelUnit.METERS_PERSEC_PERSEC;
        parameters.calibrationDataFile = "AdafruitIMUCalibration.json"; // see the calibration sample opmode
        parameters.loggingEnabled      = true;
        parameters.loggingTag          = "IMU";
        parameters.accelerationIntegrationAlgorithm = new JustLoggingAccelerationIntegrator();

        // Retrieve and initialize the IMU. We expect the IMU to be attached to an I2C port
        // on a Core Device Interface Module, configured to be a sensor of type "AdaFruit IMU",
        // and named "imu".
        imu = hardwareMap.get(BNO055IMU.class, "imu");
        imu.initialize(parameters);

        // Set up our telemetry dashboard
        composeTelemetry();

        // Wait until we're told to go
        waitForStart();

        // Start the logging of measured acceleration
        imu.startAccelerationIntegration(new Position(), new Velocity(), 1000);

        // Loop and update the dashboard
        while (opModeIsActive()) {
            telemetry.update();
        }
    }
 
Developer: MHS-FIRSTrobotics, Project: RadicalRobotics2017, Lines of code: 34, Source file: SensorAdafruitIMU.java

Example 13: formatAngle

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; // import the required package/class
String formatAngle(AngleUnit angleUnit, double angle) {
    return formatDegrees(AngleUnit.DEGREES.fromUnit(angleUnit, angle));
}
 
Developer: ykarim, Project: FTC2016, Lines of code: 4, Source file: SensorAdafruitIMU.java

Example 14: formatDegrees

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; // import the required package/class
String formatDegrees(double degrees){
    return String.format(Locale.getDefault(), "%.1f", AngleUnit.DEGREES.normalize(degrees));
}
 
Developer: ykarim, Project: FTC2016, Lines of code: 4, Source file: SensorAdafruitIMU.java

Example 15: getRawXData

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; // import the required package/class
/**
 * This method returns the raw data of the specified type for the x-axis.
 *
 * @param dataType specifies the data type.
 * @return raw data of the specified type for the x-axis.
 */
@Override
public SensorData<Double> getRawXData(DataType dataType)
{
    final String funcName = "getRawXData";
    double value = 0.0;
    long currTagId = FtcOpMode.getLoopCounter();

    if (dataType == DataType.ROTATION_RATE)
    {
        if (currTagId != turnRateTagId)
        {
            turnRateData = imu.getAngularVelocity();
            turnRateTagId = currTagId;
        }
        value = turnRateData.xRotationRate;
    }
    else if (dataType == DataType.HEADING)
    {
        if (currTagId != headingTagId)
        {
            if (USE_QUATERNION)
            {
                getEulerAngles(eulerAngles);
            }
            else
            {
                headingData = imu.getAngularOrientation(
                        AxesReference.INTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES);
            }
            headingTagId = currTagId;
        }

        if (USE_QUATERNION)
        {
            value = eulerAngles[0];
        }
        else
        {
            value = headingData.firstAngle;
        }
    }
    SensorData<Double> data = new SensorData<>(TrcUtil.getCurrentTime(), value);

    if (debugEnabled)
    {
        dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API);
        dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API,
                "=(timestamp:%.3f,value:%f", data.timestamp, data.value);
    }

    return data;
}
 
Developer: trc492, Project: Ftc2018RelicRecovery, Lines of code: 59, Source file: FtcBNO055Imu.java


Note: The org.firstinspires.ftc.robotcore.external.navigation.AngleUnit class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets were selected from open-source projects contributed by their respective authors, who retain copyright over the source code; consult each project's license before redistributing or using the code. Do not reproduce this article without permission.