This article collects typical usage examples of the Java class edu.wpi.first.wpilibj.CameraServer. If you are wondering what the CameraServer class is for, how to use it, or want working samples, the curated examples below may help.
The CameraServer class belongs to the edu.wpi.first.wpilibj package. 15 code examples of the class are shown below, sorted by popularity by default.
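For orientation before the individual examples, here is a minimal sketch of the pattern almost all of them share, assuming the 2017-era WPILib API (where UsbCamera lives in the edu.wpi.cscore package); the class name and resolution below are illustrative, not taken from any of the examples:

import edu.wpi.cscore.UsbCamera;
import edu.wpi.first.wpilibj.CameraServer;

public class CameraSetup {
    public void start() {
        // Start streaming USB camera 0 to the dashboard.
        UsbCamera camera = CameraServer.getInstance().startAutomaticCapture(0);
        camera.setResolution(320, 240); // a lower resolution saves radio bandwidth
    }
}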
Example 1: VisionProcessor
import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
/**
 * Creates the object and starts the camera server.
 *
 * @param usbPort
 *            The USB camera port number; '0' for the default camera.
 * @param camera
 *            the brand / model of the camera
 */
public VisionProcessor (int usbPort, CameraModel camera)
{
    // Adds the camera to the cscore CameraServer, in order to grab the
    // stream.
    this.camera = CameraServer.getInstance()
            .startAutomaticCapture("Vision Camera", usbPort);

    // Based on the selected camera type, set the fields of view and focal
    // length.
    this.cameraModel = camera;
    switch (this.cameraModel)
    {
        // case LIFECAM: // Not enough information to properly find this data.
        //     this.horizontalFieldOfView =
        //     this.verticalFieldOfView =
        //     this.focalLength =
        //     break;
        default: // Data defaults to one to avoid any divide-by-zero errors.
            this.horizontalFieldOfView = 1;
            this.verticalFieldOfView = 1;
    }
}
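A call site for this constructor might look like the following one-liner; the CameraModel enum and its LIFECAM constant are assumed from the commented-out case above:

// Hypothetical usage, assuming a CameraModel enum with a LIFECAM constant:
VisionProcessor processor = new VisionProcessor(0, CameraModel.LIFECAM);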
Example 2: initDefaultCommand
import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
public void initDefaultCommand() {
    visionThread = new Thread(() -> {
        // Get the UsbCamera from the CameraServer
        UsbCamera camera = CameraServer.getInstance().startAutomaticCapture();
        // Set the resolution
        camera.setResolution(640, 480);

        // Get a CvSink. This will capture Mats from the camera.
        CvSink cvSink = CameraServer.getInstance().getVideo();
        // Set up a CvSource. This will send images back to the dashboard.
        CvSource outputStream = CameraServer.getInstance().putVideo("Rectangle", 640, 480);

        // Mats are very memory expensive. Let's reuse this Mat.
        Mat mat = new Mat();

        // This cannot be 'true': the program would never exit if it were.
        // Checking for interruption lets the robot stop this thread when
        // restarting robot code or deploying.
        while (!Thread.interrupted()) {
            // Tell the CvSink to grab a frame from the camera and put it
            // in the source Mat. If there is an error, notify the output.
            if (cvSink.grabFrame(mat) == 0) {
                // Send the error to the output stream.
                outputStream.notifyError(cvSink.getError());
                // Skip the rest of the current iteration.
                continue;
            }
            // Put a rectangle on the image
            Imgproc.rectangle(mat, new Point(100, 100), new Point(400, 400),
                    new Scalar(255, 255, 255), 5);
            // Give the output stream a new image to display
            outputStream.putFrame(mat);
        }
    });
    visionThread.setDaemon(true);
    visionThread.start();
}
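The setDaemon(true) call matters here: a daemon thread does not keep the JVM alive, so robot code can restart cleanly even if the loop is still running, while the Thread.interrupted() check gives the loop a cooperative way to stop.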
Example 3: robotInit
import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
public void robotInit() {
    RobotMap.init();
    drivetrain = new Drivetrain();
    climber = new Climber();
    // fuel = new Fuel();
    gear = new Gear();
    oi = new OI();

    // initializes camera server
    server = CameraServer.getInstance();
    // cameraInit();
    // server.startAutomaticCapture(0);

    // autonomousCommand = (Command) new AutoMiddleHook();
    autonomousCommand = (Command) new AutoBaseline();

    // resets all sensors
    resetAllSensors();
    if (RobotConstants.isTestingEnvironment) updateTestingEnvironment();
    updateSensorDisplay();
}
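Note that CameraServer.getInstance() only obtains the singleton server object; no camera stream is actually published until a call such as the commented-out server.startAutomaticCapture(0) is re-enabled.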
Example 4: enable
import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
public void enable() {
    new Thread(() -> {
        cvSink = CameraServer.getInstance().getVideo(cam2);
        while (!Thread.interrupted()) {
            SmartDashboard.putString("Camera", cvSink.getSource().getName());
            cvSink.grabFrame(source);
            outputStream.putFrame(source);
            if (controls.getToggleCamera() && !buttonHeld && !isToggled) {
                isToggled = true;
                cvSink = CameraServer.getInstance().getVideo(cam2);
                System.out.println("toggled");
            } else if (controls.getToggleCamera() && !buttonHeld && isToggled) {
                isToggled = false;
                cvSink = CameraServer.getInstance().getVideo(cam1);
                System.out.println("toggled");
            }
            buttonHeld = controls.getToggleCamera();
        }
    }).start();
}
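The buttonHeld flag implements rising-edge detection: holding the toggle button switches the feed once rather than on every loop iteration. Also note that the sink initially points at cam2 while isToggled starts false, so the first press re-selects cam2 and subsequent presses alternate between the two cameras.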
Example 5: init
import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
/**
 * Initializes the cameras and starts the CameraServer to stream to GRIP /
 * SmartDashboard.
 */
public void init() {
    /*
    gearCam.setFPS(fps);
    gearCam.setResolution(xResolution, yResolution);
    // Use zero exposure for bright vision targets and back light
    // gearCam.setExposureManual(0);
    shooterCam.setFPS(fps);
    shooterCam.setResolution(xResolution, yResolution);
    // Use zero exposure for bright vision targets and back light
    // shooterCam.setExposureManual(0);
    CameraServer.getInstance().addCamera(gearCam);
    CameraServer.getInstance().addCamera(shooterCam);
    CameraServer.getInstance().startAutomaticCapture(gearCam);
    */
    CameraServer.getInstance().startAutomaticCapture(0);
}
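The commented-out block shows the multi-camera route: addCamera registers a source with the server without streaming it, and startAutomaticCapture(VideoSource) begins streaming one specific source. The active code falls back to streaming USB device 0 only.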
Example 6: SpatialAwarenessSubsystem
import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
public SpatialAwarenessSubsystem() {
    super("SpatialAwarenessSubsystem");

    cameraServer = CameraServer.getInstance();
    gearCamera = cameraServer.startAutomaticCapture(0);
    gearCamera.setResolution(IMG_WIDTH, IMG_HEIGHT);
    gearCamera.setFPS(30);
    gearCamera.setBrightness(7);
    gearCamera.setExposureManual(30);
    gearVideo = cameraServer.getVideo(gearCamera);
    visionProcessing = new VisionProcessing();

    leftUltrasonic = new AnalogInput(RobotMap.ANALOG_ULTRASONIC_LEFT);
    rightUltrasonic = new AnalogInput(RobotMap.ANALOG_ULTRASONIC_RIGHT);
    leftUltrasonicKalman = new SimpleKalman(1.4d, 64d,
            leftUltrasonic.getAverageVoltage() * ULTRASONIC_VOLTS_TO_METERS);
    rightUltrasonicKalman = new SimpleKalman(1.4d, 64d,
            rightUltrasonic.getAverageVoltage() * ULTRASONIC_VOLTS_TO_METERS);

    navX = new AHRS(SPI.Port.kMXP);
    System.out.println("SpatialAwarenessSubsystem constructor finished");
}
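Seeding each SimpleKalman filter with the sensor's current averaged voltage (converted by ULTRASONIC_VOLTS_TO_METERS) starts the estimate at the actual measured distance, which avoids a long convergence transient from an arbitrary initial state.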
Example 7: operatorControl
import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
public void operatorControl() {
    NIVision.IMAQdxStartAcquisition(session);

    /**
     * Grab an image, draw the circle, and provide it for the camera server,
     * which will in turn send it to the dashboard.
     */
    NIVision.Rect rect = new NIVision.Rect(10, 10, 100, 100);

    while (isOperatorControl() && isEnabled()) {
        NIVision.IMAQdxGrab(session, frame, 1);
        NIVision.imaqDrawShapeOnImage(frame, frame, rect,
                DrawMode.DRAW_VALUE, ShapeMode.SHAPE_OVAL, 0.0f);
        CameraServer.getInstance().setImage(frame);

        /** robot code here! **/
        Timer.delay(0.005); // wait for a motor update time
    }
    NIVision.IMAQdxStopAcquisition(session);
}
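This example uses the pre-2016 NIVision API (com.ni.vision.NIVision) together with the old CameraServer.setImage method; in 2017-era WPILib that path was replaced by the cscore classes (CvSink/CvSource) shown in the other examples.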
Example 8: robotInit
import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
@Override
public void robotInit() {
    RF = new RobotFunctions();
    chooser.addDefault("Default Auto", defaultAuto);
    chooser.addObject("My Auto", customAuto);
    SmartDashboard.putData("Auto modes", chooser);

    UsbCamera camera = CameraServer.getInstance().startAutomaticCapture();
    camera.setResolution(IMG_WIDTH, IMG_HEIGHT);

    visionThread = new VisionThread(camera, new Grip(), pipeline -> {
        // Skip frames where the pipeline found no contours.
        if (!pipeline.filterContoursOutput().isEmpty()) {
            Rect r = Imgproc.boundingRect(pipeline.filterContoursOutput().get(0));
            synchronized (imgLock) {
                centerX = r.x + (r.width / 2); // find the centre of the X value
                centerY = r.y + (r.height / 2);
                rw = r.width;
                rh = r.height;
            }
        }
    });
    visionThread.start();
}
Example 9: display
import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
public void display() {
    NIVision.IMAQdxStartAcquisition(session);

    /**
     * Grab an image, draw the circle, and provide it for the camera server,
     * which will in turn send it to the dashboard.
     */
    NIVision.Rect rect = new NIVision.Rect(10, 10, 100, 100);

    // while (teleop && enabled) {
    while (enabled) {
        NIVision.IMAQdxGrab(session, frame, 1);
        NIVision.imaqDrawShapeOnImage(frame, frame, rect,
                DrawMode.DRAW_VALUE, ShapeMode.SHAPE_OVAL, 0.0f);
        CameraServer.getInstance().setImage(frame);

        /** robot code here! **/
        Timer.delay(0.005); // wait for a motor update time
    }
    NIVision.IMAQdxStopAcquisition(session);
}
Example 10: run
import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
@Override
public void run() {
    if (sessionStarted) {
        NIVision.IMAQdxStartAcquisition(session);
        running = true;

        // imaqColorThreshold needs a pre-allocated U8 destination image;
        // allocate it once so the loop does not leak IMAQ image handles.
        Image filteredFrame = NIVision.imaqCreateImage(NIVision.ImageType.IMAGE_U8, 0);

        while (running) {
            tick++;
            SmartDashboard.putNumber("CameraThread Tick", tick);
            NIVision.IMAQdxGrab(session, frame, 1);
            NIVision.imaqColorThreshold(filteredFrame, frame, 0,
                    ColorMode.HSL, new Range(0, 255), new Range(0, 255),
                    new Range(128, 255));
            CameraServer.getInstance().setImage(filteredFrame);
            Timer.delay(0.01);
        }
        NIVision.IMAQdxStopAcquisition(session);
    }
    running = false;
}
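imaqColorThreshold writes its result into a separate 8-bit destination image rather than allocating one itself, which is why the destination is created before the loop; the three Range arguments select which HSL band of each source pixel must fall inside for it to survive the threshold.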
Example 11: robotInit
import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
@Override
public void robotInit() {
    chooser.addDefault("gear", new HangGear());
    chooser.addObject("baseline", new BaseLine());
    SmartDashboard.putData("Auto", chooser);
    oi = new OI();

    UsbCamera cam = CameraServer.getInstance().startAutomaticCapture(0);
    cam.setResolution(640, 480);
    cam.setFPS(60);
    UsbCamera cam1 = CameraServer.getInstance().startAutomaticCapture(1);
    cam1.setResolution(640, 480);
    cam1.setFPS(60);

    SmartDashboard.putString("Robot State:", "started");
    System.out.println("Robot init");

    chassis.startMonitor();
    intaker.startMonitor();
    shooter.setSleepTime(100);
    shooter.startMonitor();
    stirrer.setSleepTime(300);
    stirrer.startMonitor();
    uSensor.setSleepTime(100);
    uSensor.startMonitor();
}
Example 12: setUpCamera
import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
public void setUpCamera() {
    // Start the default USB camera and keep the handle for the vision thread.
    camera = CameraServer.getInstance().startAutomaticCapture();
    // camera.setResolution(RobotMap.IMG_WIDTH, RobotMap.IMG_HEIGHT);
    // camera.setExposureManual(RobotMap.exposure);

    visionThread = new VisionThread(camera, new GripPipeline(), pipeline -> {
        if (!pipeline.filterContoursOutput().isEmpty()) {
            Rect r = Imgproc.boundingRect(pipeline.filterContoursOutput().get(0));
            synchronized (imgLock) {
                centerX = r.x + (r.width / 2);
            }
        }
    });
}
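A consumer of this pipeline would copy the shared value out under the same lock before using it. A sketch, assuming the imgLock and centerX fields from this class and an assumed 640-pixel-wide image:

double cx;
synchronized (imgLock) {
    cx = centerX; // copy under the lock, then compute outside it
}
double turn = cx - (640 / 2); // positive means the target is right of center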
Example 13: processImage
import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
/**
 * The method that processes the image and inputs it into the particle reports.
 */
public void processImage ()
{
    // Grab the new image from the camera. grabFrame returns 0 on error;
    // any non-zero value is the captured frame's timestamp.
    long errorCode = CameraServer.getInstance()
            .getVideo("Vision Camera").grabFrame(image);

    if (image.empty())
    {
        System.out.println("Image is Empty! Unable to process image!");
        return;
    }

    if (errorCode == 0)
    {
        System.out.println(
                "There was an error grabbing the image. See below:");
        System.out.println(
                CameraServer.getInstance().getVideo().getError());
    }

    // The process image function found in the AutoGenVision class.
    super.process(image);

    // If this throws an error, make sure the GRIP project ends with a
    // filterContours function.
    this.createParticleReports(super.filterContoursOutput());

    // Sort the particles from largest to smallest.
    Arrays.sort(particleReports);
    // for (int i = 0; i < particleReports.length; i++)
    // {
    //     System.out.println(i + " " + particleReports[i].area);
    // }
}
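The zero check matches the cscore contract: CvSink.grabFrame returns 0 when the frame could not be grabbed (for example on a timeout) and the frame's timestamp otherwise, so despite the variable name errorCode, a non-zero value actually means success.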
Example 14: robotInit
import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
public void robotInit() {
    RobotMap.init();
    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
    driveTrain = new DriveTrain();
    shooter = new Shooter();
    lift = new Lift();
    // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS

    // OI must be constructed after subsystems. If the OI creates Commands
    // (which it very likely will), subsystems are not guaranteed to be
    // constructed yet. Thus, their requires() statements may grab null
    // pointers. Bad news. Don't move it.
    oi = new OI();

    // Instantiate the command used for the autonomous period.
    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=AUTONOMOUS
    autonomousCommand = new AutonomousCommand();
    // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=AUTONOMOUS

    CameraServer cameraServer = CameraServer.getInstance();
    System.out.println("Camera sources: " + VideoSource.enumerateSources().length);
    for (VideoSource videoSource : VideoSource.enumerateSources()) {
        System.out.println("Camera: " + videoSource.getName());
    }
    UsbCamera camera = cameraServer.startAutomaticCapture();
    System.out.println("Started camera capture.");
    // Hard-coded camera address
    cameraServer.addAxisCamera("AxisCam ye", "10.26.67.42");
    // visionThread = new VisionThread(camera, new GripPipeline());

    driveTrainChooser = new SendableChooser();
    driveTrainChooser.addDefault("default PWM", DriveTrain.DriveMode.PWM);
    for (DriveTrain.DriveMode driveMode : DriveTrain.DriveMode.values()) {
        driveTrainChooser.addObject(driveMode.name(), driveMode);
    }
}
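One thing to watch: driveTrainChooser is populated but never published, so it will not appear on the dashboard until it is registered, along the lines of this one-line addition (the "Drive mode" key is illustrative):

// Hypothetical follow-up, using the SmartDashboard class already imported above:
SmartDashboard.putData("Drive mode", driveTrainChooser);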
Example 15: robotInit
import edu.wpi.first.wpilibj.CameraServer; // import the required package/class
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
public void robotInit() {
    RobotMap.init();
    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
    driveTrain = new DriveTrain();
    shooter = new Shooter();
    // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS

    // OI must be constructed after subsystems. If the OI creates Commands
    // (which it very likely will), subsystems are not guaranteed to be
    // constructed yet. Thus, their requires() statements may grab null
    // pointers. Bad news. Don't move it.
    oi = new OI();

    // Instantiate the command used for the autonomous period.
    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=AUTONOMOUS
    autonomousCommand = new AutonomousCommand();
    // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=AUTONOMOUS

    CameraServer cameraServer = CameraServer.getInstance();
    System.out.println("Camera sources: " + VideoSource.enumerateSources().length);
    for (VideoSource videoSource : VideoSource.enumerateSources()) {
        System.out.println("Camera: " + videoSource.getName());
    }
    UsbCamera camera = cameraServer.startAutomaticCapture();
    System.out.println("Started camera capture.");
    // Hard-coded camera address
    cameraServer.addAxisCamera("AxisCam ye", "10.26.67.42");
    // visionThread = new VisionThread(camera, new GripPipeline());
}