

Java CameraServer Class Code Examples

This article collects typical usage examples of the Java class edu.wpi.first.wpilibj.CameraServer. If you are wondering what the CameraServer class does, how to use it, or where to find it in real code, the curated class examples below should help.


The CameraServer class belongs to the edu.wpi.first.wpilibj package. Fifteen code examples of the class are shown below, sorted by popularity by default.
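Before the individual examples, here is a minimal sketch of the pattern most of them share: obtain the singleton with CameraServer.getInstance(), start a USB camera stream for the dashboard, and optionally open a CvSink/CvSource pair for OpenCV processing. The CameraServer, UsbCamera, CvSink and CvSource calls are the same ones used in the examples below; the wrapping class and the 320x240 resolution are assumptions for illustration.

import edu.wpi.cscore.CvSink;
import edu.wpi.cscore.CvSource;
import edu.wpi.cscore.UsbCamera;
import edu.wpi.first.wpilibj.CameraServer;

public class CameraSetupSketch {
    /** Typically called once from robotInit() (assumption for this sketch). */
    public void startCamera() {
        // Start streaming USB camera 0 to the dashboard.
        UsbCamera camera = CameraServer.getInstance().startAutomaticCapture(0);
        camera.setResolution(320, 240);

        // Optional: grab frames for OpenCV processing and publish the result
        // back to the dashboard under its own stream name. The actual
        // grabFrame()/putFrame() loop would run in a vision thread (see Example 2).
        CvSink sink = CameraServer.getInstance().getVideo();
        CvSource output = CameraServer.getInstance().putVideo("Processed", 320, 240);
    }
}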

Example 1: VisionProcessor

import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
/**
 * Creates the object and starts the camera server
 * 
 * @param usbPort
 *            The USB camera port number. '0' for default.
 * @param camera
 *            the brand / model of the camera
 */
public VisionProcessor (int usbPort, CameraModel camera)
{
    // Adds the camera to the cscore CameraServer, in order to grab the
    // stream.
    this.camera = CameraServer.getInstance()
            .startAutomaticCapture("Vision Camera", usbPort);

    // Based on the selected camera type, set the field of views and focal
    // length.
    this.cameraModel = camera;
    switch (this.cameraModel)
        {
        // case LIFECAM: //Not enough information to properly find this data.
        // see above.
        // this.horizontalFieldOfView =
        // this.verticalFieldOfView =
        // this.focalLength =
        // break;
        default: // Data will default to one to avoid any "divide by zero"
                 // errors.
            this.horizontalFieldOfView = 1;
            this.verticalFieldOfView = 1;
        }

}
 
Developer: FIRST-Team-339, Project: 2017, Lines: 34, Source: VisionProcessor.java
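Example 1 switches on a project-specific CameraModel enum that is not included on this page. A hypothetical sketch of what such an enum might look like is shown below; the constants are illustrative assumptions, not the FIRST-Team-339 code:

// Hypothetical sketch only -- the real enum lives elsewhere in the 2017 project.
public enum CameraModel {
    LIFECAM,  // e.g. a Microsoft LifeCam USB camera (see the commented-out case above)
    UNKNOWN   // falls through to the default branch, which uses safe placeholder values
}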

Example 2: initDefaultCommand

import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
public void initDefaultCommand() {
    visionThread = new Thread(() -> {
        // Get the UsbCamera from CameraServer
        UsbCamera camera = CameraServer.getInstance().startAutomaticCapture();
        // Set the resolution
        camera.setResolution(640, 480);

        // Get a CvSink. This will capture Mats from the camera.
        CvSink cvSink = CameraServer.getInstance().getVideo();
        // Set up a CvSource. This will send images back to the Dashboard.
        CvSource outputStream = CameraServer.getInstance().putVideo("Rectangle", 640, 480);

        // Mats are very memory expensive. Let's reuse this Mat.
        Mat mat = new Mat();

        // The loop condition cannot be 'true': the program would never exit.
        // Checking Thread.interrupted() lets the robot stop this thread when
        // robot code is restarted or redeployed.
        while (!Thread.interrupted()) {
            // Tell the CvSink to grab a frame from the camera and put it
            // in the source mat. If there is an error, notify the output.
            if (cvSink.grabFrame(mat) == 0) {
                // Send the error to the output stream.
                outputStream.notifyError(cvSink.getError());
                // Skip the rest of the current iteration.
                continue;
            }
            // Put a rectangle on the image
            Imgproc.rectangle(mat, new Point(100, 100), new Point(400, 400),
                    new Scalar(255, 255, 255), 5);
            // Give the output stream a new image to display
            outputStream.putFrame(mat);
        }
    });
    visionThread.setDaemon(true);
    visionThread.start();
}
 
Developer: FRC6706, Project: FRC6706_JAVA2017, Lines: 38, Source: VisionSubsystem2.java

Example 3: robotInit

import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
public void robotInit() {
		
	RobotMap.init();
	
	drivetrain = new Drivetrain();
	climber = new Climber();
	// fuel = new Fuel();
	gear = new Gear();
	
	oi = new OI();
	
	// initializes camera server
	server = CameraServer.getInstance();
	// cameraInit();
	// server.startAutomaticCapture(0);
	
	// autonomousCommand = (Command) new AutoMiddleHook();
	autonomousCommand = (Command) new AutoBaseline();
	
	// resets all sensors
	resetAllSensors();

	if (RobotConstants.isTestingEnvironment) updateTestingEnvironment();
	updateSensorDisplay();
}
 
Developer: Team4914, Project: 2017-emmet, Lines: 26, Source: Robot.java

Example 4: enable

import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
public void enable() {
    new Thread(() -> {
        cvSink = CameraServer.getInstance().getVideo(cam2);
        while (!Thread.interrupted()) {
            SmartDashboard.putString("Camera", cvSink.getSource().getName());

            cvSink.grabFrame(source);
            outputStream.putFrame(source);

            if (controls.getToggleCamera() && !buttonHeld && !isToggled) {
                isToggled = true;
                cvSink = CameraServer.getInstance().getVideo(cam2);
                System.out.println("toggled");
            } else if (controls.getToggleCamera() && !buttonHeld && isToggled) {
                isToggled = false;
                cvSink = CameraServer.getInstance().getVideo(cam1);
                System.out.println("toggled");
            }
            buttonHeld = controls.getToggleCamera();
        }
    }).start();
}
 
Developer: Team334, Project: R2017, Lines: 23, Source: CameraSet.java

Example 5: init

import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
/**
 * Initializes the cameras and starts the CameraServer to stream to GRIP /
 * SmartDashboard
 */
public void init() {
/*	gearCam.setFPS(fps);
	gearCam.setResolution(xResolution, yResolution);
	// Use zero exposure for bright vision targets and back light
	// gearCam.setExposureManual(0);

	shooterCam.setFPS(fps);
	shooterCam.setResolution(xResolution, yResolution);
	// Use zero exposure for bright vision targets and back light
	// shooterCam.setExposureManual(0);

	CameraServer.getInstance().addCamera(gearCam);

	CameraServer.getInstance().addCamera(shooterCam);

	CameraServer.getInstance().startAutomaticCapture(gearCam); */
	
	CameraServer.getInstance().startAutomaticCapture(0);

}
 
Developer: 1757WestwoodRobotics, Project: 2017-Steamworks, Lines: 25, Source: Vision.java

Example 6: SpatialAwarenessSubsystem

import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
public SpatialAwarenessSubsystem() {
  super("SpatialAwarenessSubsystem");

  cameraServer = CameraServer.getInstance();
  gearCamera = cameraServer.startAutomaticCapture(0);
  gearCamera.setResolution(IMG_WIDTH, IMG_HEIGHT);
  gearCamera.setFPS(30);
  gearCamera.setBrightness(7);
  gearCamera.setExposureManual(30);
  gearVideo = cameraServer.getVideo(gearCamera);

  visionProcessing = new VisionProcessing();

  leftUltrasonic = new AnalogInput(RobotMap.ANALOG_ULTRASONIC_LEFT);
  rightUltrasonic = new AnalogInput(RobotMap.ANALOG_ULTRASONIC_RIGHT);

  leftUltrasonicKalman = new SimpleKalman(1.4d, 64d, leftUltrasonic.getAverageVoltage() * ULTRASONIC_VOLTS_TO_METERS);
  rightUltrasonicKalman = new SimpleKalman(1.4d, 64d, rightUltrasonic.getAverageVoltage() * ULTRASONIC_VOLTS_TO_METERS);

  navX = new AHRS(SPI.Port.kMXP);

  System.out.println("SpatialAwarenessSubsystem constructor finished");
}
 
Developer: FRC-1294, Project: frc2017, Lines: 24, Source: SpatialAwarenessSubsystem.java

Example 7: operatorControl

import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
public void operatorControl() {
    NIVision.IMAQdxStartAcquisition(session);

    /**
     * grab an image, draw the circle, and provide it for the camera server
     * which will in turn send it to the dashboard.
     */
    NIVision.Rect rect = new NIVision.Rect(10, 10, 100, 100);

    while (isOperatorControl() && isEnabled()) {

        NIVision.IMAQdxGrab(session, frame, 1);
        NIVision.imaqDrawShapeOnImage(frame, frame, rect,
                DrawMode.DRAW_VALUE, ShapeMode.SHAPE_OVAL, 0.0f);
        
        CameraServer.getInstance().setImage(frame);

        /** robot code here! **/
        Timer.delay(0.005);		// wait for a motor update time
    }
    NIVision.IMAQdxStopAcquisition(session);
}
 
Developer: wjaneal, Project: liastem, Lines: 23, Source: Robot.java

Example 8: robotInit

import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
@Override
public void robotInit() {
	RF = new RobotFunctions();
	chooser.addDefault("Default Auto", defaultAuto);
	chooser.addObject("My Auto", customAuto);
	SmartDashboard.putData("Auto modes", chooser);
	  UsbCamera camera = CameraServer.getInstance().startAutomaticCapture();
	    camera.setResolution(IMG_WIDTH, IMG_HEIGHT);
	    
	    visionThread = new VisionThread(camera, new Grip(), pipeline -> {
	        // Only process when the pipeline actually found contours;
	        // the original check pipeline.equals(null) can never be true here.
	        if (!pipeline.filterContoursOutput().isEmpty()) {
	            Rect r = Imgproc.boundingRect(pipeline.filterContoursOutput().get(0));
	            synchronized (imgLock) {
	                centerX = r.x + (r.width / 2);//Find the centre of the X Value
	                centerY = r.y + (r.height / 2);
	                rw = r.width;
	                rh = r.height;
	            }
	        }
	    });
	    visionThread.start();
}
 
Developer: wjaneal, Project: liastem, Lines: 23, Source: Robot.java

Example 9: display

import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
public void display() {
	NIVision.IMAQdxStartAcquisition(session);

    /**
     * grab an image, draw the circle, and provide it for the camera server
     * which will in turn send it to the dashboard.
     */
    NIVision.Rect rect = new NIVision.Rect(10, 10, 100, 100);

    //while (teleop && enabled) {
    while(enabled) {
        NIVision.IMAQdxGrab(session, frame, 1);
        NIVision.imaqDrawShapeOnImage(frame, frame, rect,
                DrawMode.DRAW_VALUE, ShapeMode.SHAPE_OVAL, 0.0f);
        
        CameraServer.getInstance().setImage(frame);

        /** robot code here! **/
        Timer.delay(0.005);		// wait for a motor update time
    }
    NIVision.IMAQdxStopAcquisition(session);
}
 
Developer: FRC-Team-3140, Project: FRC-2016, Lines: 23, Source: DriveCam.java

Example 10: run

import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
@Override
public void run() {
	if (sessionStarted) {
		NIVision.IMAQdxStartAcquisition(session);
		running = true;
		while (running) {
			tick++;
			SmartDashboard.putNumber("CameraThread Tick", tick);

			NIVision.IMAQdxGrab(session, frame, 1);

			// The destination image must be allocated before use; passing a
			// null Image here would make imaqColorThreshold fail. (Allocating
			// it once outside the loop would avoid per-frame allocations.)
			Image filteredFrame = NIVision.imaqCreateImage(
					NIVision.ImageType.IMAGE_U8, 0);

			NIVision.imaqColorThreshold(filteredFrame, frame, 0,
					ColorMode.HSL, new Range(0, 255), new Range(0, 255),
					new Range(128, 255));

			CameraServer.getInstance().setImage(filteredFrame);

			Timer.delay(0.01);
		}
		NIVision.IMAQdxStopAcquisition(session);
	}
	running = false;
}
 
Developer: TeamCautionRobotics, Project: 2015-FRC-robot, Lines: 26, Source: CameraThread.java

Example 11: robotInit

import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
@Override
public void robotInit() {
    chooser.addDefault("gear", new HangGear());
    chooser.addObject("baseline", new BaseLine());
    SmartDashboard.putData("Auto", chooser);

    oi = new OI();

    UsbCamera cam = CameraServer.getInstance().startAutomaticCapture(0);
    cam.setResolution(640, 480);
    cam.setFPS(60);
    UsbCamera cam1 = CameraServer.getInstance().startAutomaticCapture(1);
    cam1.setResolution(640, 480);
    cam1.setFPS(60);
    SmartDashboard.putString("Robot State:", "started");
    System.out.println("Robot init");
    chassis.startMonitor();
    intaker.startMonitor();

    shooter.setSleepTime(100);
    shooter.startMonitor();

    stirrer.setSleepTime(300);
    stirrer.startMonitor();

    uSensor.setSleepTime(100);
    uSensor.startMonitor();
}
 
Developer: FRCteam6414, Project: FRC6414program, Lines: 33, Source: Robot.java

Example 12: setUpCamera

import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
public void setUpCamera(){
    CameraServer.getInstance().startAutomaticCapture();
    camera = CameraServer.getInstance().startAutomaticCapture();
    //camera.setResolution(RobotMap.IMG_WIDTH, RobotMap.IMG_HEIGHT);
    //camera.setExposureManual(RobotMap.exposure);
    visionThread = new VisionThread(camera, new GripPipeline(), pipeline -> {
        if (!pipeline.filterContoursOutput().isEmpty()) {
            Rect r = Imgproc.boundingRect(pipeline.filterContoursOutput().get(0));
            synchronized (imgLock) {
                centerX = r.x + (r.width / 2);
            }
        }
    });
}
 
Developer: mr-glt, Project: FRC-2017-Command, Lines: 15, Source: Vision.java
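The VisionThread in Example 12 publishes centerX under imgLock; the sketch below shows how a drive loop might read it back under the same lock. This is a minimal sketch, not the mr-glt project code; the IMG_WIDTH value and the 0.005 gain are illustrative assumptions.

// Minimal sketch: read the latest centerX under the same lock the
// vision callback writes it with, then turn toward the target.
public class VisionConsumerSketch {
    private final Object imgLock = new Object();
    private double centerX;                    // written by the VisionThread callback
    private static final int IMG_WIDTH = 320;  // assumption; should match the camera resolution

    /** Returns a turn command proportional to the target's offset from image center. */
    public double getTurnCommand() {
        double cx;
        synchronized (imgLock) {  // same lock the vision callback uses
            cx = centerX;
        }
        // Positive when the target is right of center; 0.005 is an illustrative gain.
        return (cx - (IMG_WIDTH / 2.0)) * 0.005;
    }
}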

Example 13: processImage

import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
/**
 * The method that processes the image and inputs it into the particle reports
 */
public void processImage ()
{
    // grabFrame returns the frame time on success; a return value of 0
    // means an error occurred while grabbing the new image from the camera.
    long errorCode = CameraServer.getInstance()
            .getVideo("Vision Camera").grabFrame(image);

    if (image.empty())
        {
        System.out.println("Image is Empty! Unable to process image!");
        return;
        }

    if (errorCode == 0)
        {
        System.out.println(
                "There was an error grabbing the image. See below:");
        System.out.println(
                CameraServer.getInstance().getVideo().getError());
        }

    // The process image function found in the AutoGenVision class.
    super.process(image);
    // If this throws an error, make sure the GRIP project ends with a
    // filterContours function.
    this.createParticleReports(super.filterContoursOutput());
    // Sort the particles from largest to smallest
    Arrays.sort(particleReports);
    // for (int i = 0; i < particleReports.length; i++)
    // {
    // System.out.println(i + " " + particleReports[i].area);
    // }
}
 
Developer: FIRST-Team-339, Project: 2017, Lines: 37, Source: VisionProcessor.java
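Example 13 sorts particleReports from largest to smallest with Arrays.sort, which relies on the element type's natural ordering. Below is a hypothetical sketch of a compareTo that yields that descending-by-area order; the class and field names are assumptions based on the commented-out debug loop above, not the actual FIRST-Team-339 code:

// Hypothetical sketch: Arrays.sort uses natural ordering, so compareTo must
// report "larger area first" to get the largest-to-smallest order described
// in Example 13's comment.
public class ParticleReport implements Comparable<ParticleReport> {
    public double area;  // referenced by the commented-out debug loop above

    @Override
    public int compareTo(ParticleReport other) {
        // Reverse the usual ascending comparison: bigger areas sort earlier.
        return Double.compare(other.area, this.area);
    }
}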

Example 14: robotInit

import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
public void robotInit() {
    RobotMap.init();
    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
    driveTrain = new DriveTrain();
    shooter = new Shooter();
    lift = new Lift();

// END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
    // OI must be constructed after subsystems. If the OI creates Commands
    //(which it very likely will), subsystems are not guaranteed to be
    // constructed yet. Thus, their requires() statements may grab null
    // pointers. Bad news. Don't move it.
    oi = new OI();

    // instantiate the command used for the autonomous period
    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=AUTONOMOUS

    autonomousCommand = new AutonomousCommand();

// END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=AUTONOMOUS
    CameraServer cameraServer = CameraServer.getInstance();
    System.out.println("Camera sources:" + VideoSource.enumerateSources().length);
    for (VideoSource videoSource : VideoSource.enumerateSources()) {
    	System.out.println("Camera: " + videoSource.getName());
    }
    
    UsbCamera camera = cameraServer.startAutomaticCapture();
    System.out.println("Started camera capture.");
    // Hard-coded camera address
    cameraServer.addAxisCamera("AxisCam ye", "10.26.67.42");
  //  visionThread = new VisionThread(camera,new GripPipeline());

    driveTrainChooser = new SendableChooser();
    driveTrainChooser.addDefault("default PWM", DriveTrain.DriveMode.PWM);
    for (DriveTrain.DriveMode driveMode : DriveTrain.DriveMode.values()) {
        driveTrainChooser.addObject(driveMode.name(), driveMode);
    }
}
 
Developer: Team2667, Project: SteamWorks, Lines: 43, Source: Robot.java

Example 15: robotInit

import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
public void robotInit() {
    RobotMap.init();
    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
    driveTrain = new DriveTrain();
    shooter = new Shooter();

// END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
    // OI must be constructed after subsystems. If the OI creates Commands
    //(which it very likely will), subsystems are not guaranteed to be
    // constructed yet. Thus, their requires() statements may grab null
    // pointers. Bad news. Don't move it.
    oi = new OI();

    // instantiate the command used for the autonomous period
    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=AUTONOMOUS

    autonomousCommand = new AutonomousCommand();

// END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=AUTONOMOUS
    CameraServer cameraServer = CameraServer.getInstance();
    System.out.println("Camera sources:" + VideoSource.enumerateSources().length);
    for (VideoSource videoSource : VideoSource.enumerateSources()) {
    	System.out.println("Camera: " + videoSource.getName());
    }
    
    UsbCamera camera = cameraServer.startAutomaticCapture();
    System.out.println("Started camera capture.");
    // Hard-coded camera address
    cameraServer.addAxisCamera("AxisCam ye", "10.26.67.42");
  //  visionThread = new VisionThread(camera,new GripPipeline());    
}
 
Developer: Team2667, Project: SteamWorks, Lines: 36, Source: Robot.java


Note: the edu.wpi.first.wpilibj.CameraServer examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from community open-source projects; copyright remains with the original authors, and any use or redistribution should follow the corresponding project's license. Please do not republish without permission.