This article collects typical usage examples of the Java method org.opencv.videoio.VideoCapture.isOpened. If you are wondering how VideoCapture.isOpened is used in practice, or are looking for concrete examples of calling it, the curated code samples below may help. You may also want to look further into the usage of its declaring class, org.opencv.videoio.VideoCapture.
The sections below show 11 code examples of VideoCapture.isOpened, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
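Before the individual examples, here is a minimal, self-contained sketch of the pattern most of them share: open a capture device, guard further calls with isOpened(), read a frame, and release the device. The class name and output file are placeholders chosen for illustration; only the OpenCV calls (VideoCapture, isOpened, read, release, Imgcodecs.imwrite) mirror what the examples below demonstrate.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.videoio.VideoCapture;

// Hypothetical demo class; the file name "frame.jpg" is a placeholder.
public class IsOpenedSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME); // load the native OpenCV library
        VideoCapture capture = new VideoCapture(0);   // device index 0
        if (!capture.isOpened()) {                    // guard every further call
            System.err.println("Could not open camera 0");
            return;
        }
        Mat frame = new Mat();
        if (capture.read(frame) && !frame.empty()) {  // grab and decode one frame
            Imgcodecs.imwrite("frame.jpg", frame);    // placeholder output path
        }
        capture.release();                            // free the device
    }
}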
Example 1: Webcam
import org.opencv.videoio.VideoCapture; // import the package/class the method depends on
Webcam() {
    cap = new VideoCapture(0);
    if (!cap.isOpened()) {
        System.out.println("Camera Error");
    } else {
        System.out.println("Camera OK?");
        cap.set(Videoio.CV_CAP_PROP_FRAME_WIDTH, width);
        cap.set(Videoio.CV_CAP_PROP_FRAME_HEIGHT, height);
    }
    try {
        Thread.sleep(1000);
    } catch (InterruptedException ex) {
    }
    cap.read(mat);
    System.out.println("width, height = " + mat.cols() + ", " + mat.rows());
}
Example 2: grabImage
import org.opencv.videoio.VideoCapture; // import the package/class the method depends on
public void grabImage() {
    Mat frame = new Mat();
    // connect to the device
    videoCapture = new VideoCapture(0);
    isOpened = videoCapture.isOpened();
    System.out.println("connected: " + isOpened);
    // apply capture settings
    videoCapture.set(Videoio.CV_CAP_PROP_FRAME_WIDTH, 1280);
    videoCapture.set(Videoio.CV_CAP_PROP_FRAME_HEIGHT, 720);
    // start grabbing
    isSucceed = videoCapture.grab();
    System.out.println("started: " + String.valueOf(isSucceed));
    if ((!isOpened) || (!isSucceed))
        return;
    System.out.println("------- START GRAB -------");
    // wait for the camera to start delivering frames
    while (true) {
        videoCapture.read(frame);
        if (!frame.empty())
            break;
    }
    int frameNo = 0;
    long startSysMillis = System.currentTimeMillis();
    while (frameNo < 1000) {
        videoCapture.read(frame);
        frameNo++;
    }
    System.out.println(frameNo + " frames in " + (System.currentTimeMillis() - startSysMillis) + " millis");
    videoCapture.release(); // release device
    System.out.println('\n' + "Done");
}
Example 3: main
import org.opencv.videoio.VideoCapture; // import the package/class the method depends on
public static void main(String[] args)
{
    System.out.println("Hello, OpenCV");
    // Load the native library.
    System.loadLibrary("opencv_java244");
    VideoCapture camera = new VideoCapture(0);
    camera.open(0); // redundant: the constructor above already opened device 0
    if (!camera.isOpened()) {
        System.out.println("Camera Error");
    }
    else {
        System.out.println("Camera OK?");
    }
    Mat frame = new Mat();
    //camera.grab();
    //System.out.println("Frame Grabbed");
    //camera.retrieve(frame);
    //System.out.println("Frame Decoded");
    camera.read(frame);
    System.out.println("Frame Obtained");
    /* No difference
    camera.release();
    */
    System.out.println("Captured Frame Width " + frame.width());
    Imgcodecs.imwrite("camera.jpg", frame);
    System.out.println("OK");
}
Example 4: CvCamera
import org.opencv.videoio.VideoCapture; // import the package/class the method depends on
/**
 * Opens a new camera using OpenCV at a certain device index with
 * a given frame width and height, and a compression quality.
 *
 * @param cam the device index, counted from 0
 * @param width the frame width
 * @param height the frame height
 * @param quality the compression quality
 *
 * @throws RuntimeException if the camera could not be opened
 */
public CvCamera(int cam, int width, int height, int quality) {
    capture = new VideoCapture();
    capture.open(cam);
    if (!capture.isOpened())
        throw new RuntimeException("Unable to open camera " + cam);
    image = new Mat();
    buffer = new MatOfByte();
    compressParams = new MatOfInt(Imgcodecs.CV_IMWRITE_JPEG_QUALITY, quality);
    capture.set(Videoio.CAP_PROP_FRAME_WIDTH, width);
    capture.set(Videoio.CAP_PROP_FRAME_HEIGHT, height);
    camIndex = cam;
    this.quality = quality;
}
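For completeness, a hedged call-site sketch for this constructor; the device index, resolution, and quality below are arbitrary illustration values, and how the surrounding class uses the camera afterwards is not shown in this example.

// Hypothetical call site; 0, 640, 480 and a JPEG quality of 80 are example values.
try {
    CvCamera camera = new CvCamera(0, 640, 480, 80);
    // ... use the camera ...
} catch (RuntimeException e) {
    // thrown by the constructor when the device cannot be opened
    System.err.println(e.getMessage());
}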
Example 5: processLiveImageFromWebcam
import org.opencv.videoio.VideoCapture; // import the package/class the method depends on
private void processLiveImageFromWebcam() {
    VideoCapture videoCapture = new VideoCapture();
    System.out.println("Capture device open: " + videoCapture.open(0));
    Mat frame = new Mat();
    ScheduledExecutorService service = Executors.newSingleThreadScheduledExecutor();
    if (videoCapture.isOpened()) {
        System.out.println("Camera is on");
        service.scheduleAtFixedRate(new Runnable() {
            @Override
            public void run() {
                videoCapture.read(frame);
                String suffix = LocalTime.now().toString();
                Imgcodecs.imwrite("frame" + suffix + ".png", frame);
            }
        }, 0, 33, TimeUnit.MILLISECONDS);
        try {
            service.awaitTermination(10, TimeUnit.SECONDS);
        } catch (InterruptedException ex) {
            Logger.getLogger(OpenCVTest.class.getName()).log(Level.SEVERE, null, ex);
        }
    } else {
        System.err.println("Camera is off");
    }
    service.shutdown();
    videoCapture.release();
}
Example 6: makeCamera
import org.opencv.videoio.VideoCapture; // import the package/class the method depends on
/**
 * Make a connection to a camera.
 *
 * @param device camera (device) index
 * @param width frame width in pixels
 * @param height frame height in pixels
 * @param exposure relative exposure; values greater than -1.0 switch to manual exposure
 * @return the opened camera
 */
public static VideoCapture makeCamera(int device, int width, int height, double exposure) {
    VideoCapture camera = new VideoCapture(device); // open the requested device index
    camera.set(Videoio.CAP_PROP_FRAME_WIDTH, width);
    camera.set(Videoio.CAP_PROP_FRAME_HEIGHT, height);
    if (exposure > -1.0) {
        System.out.println("\t" + exposure);
        camera.set(Videoio.CAP_PROP_AUTO_EXPOSURE, 0);
        camera.set(Videoio.CAP_PROP_EXPOSURE, exposure);
    }
    if (!camera.isOpened()) {
        throw new RuntimeException("Camera will not open");
    }
    return camera;
}
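A quick call-site sketch for this helper, with illustrative values only: passing an exposure of -1.0 or lower leaves the camera's auto exposure untouched, while any larger value switches to manual exposure, as the condition in the method shows.

// Hypothetical usage; device 0 and 640x480 are example values.
VideoCapture camera = makeCamera(0, 640, 480, -1.0); // -1.0 keeps auto exposure
Mat frame = new Mat();
if (camera.isOpened() && camera.read(frame)) {
    System.out.println("Got a " + frame.cols() + "x" + frame.rows() + " frame");
}
camera.release();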
Example 7: Video
import org.opencv.videoio.VideoCapture; // import the package/class the method depends on
/**
 * Constructor.
 * @param video the media file to open
 * @param width the frame width
 * @param height the frame height
 */
public Video(File video, int width, int height) {
    this.setVideo(video);
    cap = new VideoCapture(video.getAbsolutePath()); // open from a file path instead of a device index
    if (cap.isOpened()) System.out.println("Opened Media File.");
    else System.out.println("Media File is not opened.");
    mat = new Mat();
    this.width = width;
    this.height = height;
}
Example 8: init
import org.opencv.videoio.VideoCapture; // import the package/class the method depends on
@Override
public void init() {
    imageProcessor = new ImageProcessor();
    webcamMatImage = new Mat();
    capture = new VideoCapture(0);
    capture.set(Videoio.CAP_PROP_FRAME_WIDTH, 640);
    capture.set(Videoio.CAP_PROP_FRAME_HEIGHT, 480);
    if (capture.isOpened()) {
        new Thread(new Runnable() {
            @Override
            public void run() {
                BufferedImage tempImage;
                while (true) {
                    capture.read(webcamMatImage);
                    if (!webcamMatImage.empty()) {
                        tempImage = imageProcessor.toBufferedImage(webcamMatImage);
                        if (isFront)
                            Listeners.getInstance().updateImageFront(tempImage);
                        else
                            Listeners.getInstance().updateImageBottom(tempImage);
                    } else {
                        System.out.println(" -- Frame not captured -- Break!");
                        break;
                    }
                    try {
                        Thread.sleep(60);
                    } catch (InterruptedException e) {
                        System.out.println("not sleep");
                    }
                }
            }
        }).start();
    } else {
        System.out.println("Couldn't open capture.");
    }
}
Example 9: main
import org.opencv.videoio.VideoCapture; // import the package/class the method depends on
public static void main(String args[]) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME); // Load the native OpenCV library
    Webcam panel = new Webcam(); // Initialize itself
    // Initialize the JFrame/JPanel
    JFrame frame = new JFrame("Webcam");
    frame.setSize(200, 200);
    frame.setContentPane(panel);
    frame.setVisible(true);
    VideoCapture camera = new VideoCapture(0); // The camera
    // Attempt to set the frame size; it doesn't really work and only makes the frame bigger
    camera.set(Videoio.CAP_PROP_FRAME_WIDTH, 1900);
    camera.set(Videoio.CAP_PROP_FRAME_HEIGHT, 1000);
    // Special window listener because there are threads that need to be shut down on close
    frame.addWindowListener(new WindowAdapter() {
        @Override
        public void windowClosing(WindowEvent e) {
            e.getWindow().dispose();
            camera.release();
            System.exit(0);
        }
    });
    if (camera.isOpened()) {
        // Create a SwingWorker to encapsulate the capture loop in a background thread
        SwingWorker<Void, Mat> worker = new SwingWorker<Void, Mat>() {
            @Override
            protected Void doInBackground() throws Exception {
                // Put something into thisFrame so it doesn't glitch at the beginning
                Mat thisFrame = new Mat();
                camera.read(thisFrame);
                Imgproc.cvtColor(thisFrame, thisFrame, Imgproc.COLOR_BGR2GRAY); // Convert the frame to gray
                // Set up new Mats for diffing them later, manually set the amount of channels to avoid an OpenCV error
                Mat pastFrame = new Mat();
                Mat diff = new Mat();
                // isCancelled is set by the SwingWorker
                while (!isCancelled()) {
                    thisFrame.copyTo(pastFrame); // What was previously the frame is now the pastFrame
                    camera.read(thisFrame); // Grab the camera image into thisFrame
                    Imgproc.cvtColor(thisFrame, thisFrame, Imgproc.COLOR_BGR2GRAY); // Convert the frame to gray
                    if (!thisFrame.empty()) {
                        // Set the frame size to have a nice border around the image
                        frame.setSize(thisFrame.width() + 40, thisFrame.height() + 60);
                        Core.absdiff(thisFrame, pastFrame, diff); // Diff the frames
                        Imgproc.GaussianBlur(diff, diff, new Size(7, 7), 7); // Despeckle
                        Imgproc.threshold(diff, diff, 5, 255, 1); // Threshold the gray diff
                        image = matrixToBuffer(getFace(thisFrame, diff)); // Update the display image
                        panel.repaint(); // Refresh the panel
                    } else {
                        System.err.println("Error: no frame captured");
                    }
                    //Thread.sleep(70); // Set refresh rate, as well as prevent the code from tripping over itself
                }
                return null;
            }
        };
        worker.execute();
    }
    return;
}
Example 10: calibration
import org.opencv.videoio.VideoCapture; // import the package/class the method depends on
public void calibration(VideoCapture capture, EyeDetection e) {
    initializeEvetything();
    /*
    System.out.println("Calibration time!");
    System.out.println("Open both your eyes for 2 seconds");
    */
    pressAnyKeyToContinue("Calibration time!\nOpen both your eyes for 2 seconds");
    boolean first_capture;
    first_capture = true;
    //capture.open(0); not sure may need
    /*
    if (capture.isOpened()) {
        capture.set(io.CV_CAP_PROP_FRAME_WIDTH, width);
        capture.set(io.CV_CAP_PROP_FRAME_HEIGHT, height);
    }
    */
    while (openEyesList.size() < 2) {
        if (!first_capture) {
            //System.out.println("Can you please repeat?");
            pressAnyKeyToContinue("Can you please repeat?");
        } else {
            first_capture = false;
        }
        if (capture.read(openEyesMat)) {
            openEyesList = e.eyeDetect(openEyesMat);
        }
    }
    e.filter(openEyesList);
    //System.out.println();
    pressAnyKeyToContinue("Nice, Now close your eyes for 2 seconds");
    capture.release();
    capture.open(0);
    first_capture = true;
    while (closedEyesList.size() < 2) {
        if (!capture.isOpened())
            capture.open(0);
        if (first_capture) {
            first_capture = false;
        } else {
            //System.out.println("Can you please repeat?");
            pressAnyKeyToContinue("Can you please repeat?");
        }
        if (capture.read(closedEyesMat)) {
            closedEyesList = e.eyeDetect(closedEyesMat);
        }
    }
    e.filter(closedEyesList);
    System.out.println();
    capture.release();
}
Example 11: runMainLoop
import org.opencv.videoio.VideoCapture; // import the package/class the method depends on
private void runMainLoop(String[] args) throws InterruptedException {
    ImageViewer imageProcessor = new ImageViewer();
    FloodFill floodFill = new FloodFill();
    Mat webcamMatImage = new Mat();
    Mat mask = new Mat();
    Image tempImage;
    Image tempImage2;
    Image tempImage3;
    VideoCapture capture = VideoCaptureFactory.videoCapture(args);
    //VideoCapture capture = new VideoCapture("D:\\STUDIA\\floor3.mp4");
    capture.set(Videoio.CAP_PROP_FRAME_WIDTH, 500);
    capture.set(Videoio.CAP_PROP_FRAME_HEIGHT, 300);
    int initFrames = (int) capture.get(Videoio.CAP_PROP_FPS);
    if (capture.isOpened()) {
        while (true) {
            capture.read(webcamMatImage);
            if (!webcamMatImage.empty()) {
                Mat matblurredImage = imageProcessor.blur(webcamMatImage, 1);
                mask.create(new Size(webcamMatImage.cols() + 2, webcamMatImage.rows() + 2), CvType.CV_8UC1);
                mask.setTo(new Scalar(0));
                // Range mode for the flood fill
                floodFill.setRange(FloodFill.FIXED_RANGE);
                // Use 8-neighbour connectivity
                floodFill.setConnectivity(8);
                // Lower and upper difference thresholds between pixels
                floodFill.setLowerDiff(sliders.getSliderValue("lower diff"));
                floodFill.setUpperDiff(sliders.getSliderValue("upper diff"));
                // x/y coordinates (as percentages) of the seed pixel to flood from
                seedPointXperc = sliders.getSliderValue("seed point x");
                seedPointYperc = sliders.getSliderValue("seed point y");
                // limit seed point coordinates so they do not fall outside the image
                int seedPointX = Math.min(webcamMatImage.width() * seedPointXperc / 100, webcamMatImage.width() - 1);
                int seedPointY = Math.min(webcamMatImage.height() * seedPointYperc / 100, webcamMatImage.height() - 1);
                floodFill.fill(matblurredImage, mask, seedPointX, seedPointY);
                tempImage = imageProcessor.toBufferedImage(matblurredImage);
                tempImage2 = imageProcessor.toBufferedImage(webcamMatImage);
                Mat temp = matblurredImage.clone();
                drawHoughLines(webcamMatImage, temp);
                tempImage3 = imageProcessor.toBufferedImage(temp);
                ImageIcon imageIcon = new ImageIcon(tempImage, "Floor Detection 0.1");
                ImageIcon imageIcon2 = new ImageIcon(tempImage2, "Floor Detection 0.1");
                ImageIcon imageIcon3 = new ImageIcon(tempImage3, "Floor Detection 0.1");
                imageLabel1.setIcon(imageIcon);
                imageLabel1.setText("Flood filling");
                imageLabel1.setHorizontalTextPosition(JLabel.CENTER);
                imageLabel1.setVerticalTextPosition(JLabel.BOTTOM);
                imageLabel2.setIcon(imageIcon2);
                imageLabel2.setText("Original input");
                imageLabel2.setHorizontalTextPosition(JLabel.CENTER);
                imageLabel2.setVerticalTextPosition(JLabel.BOTTOM);
                imageLabel3.setIcon(imageIcon3);
                imageLabel3.setText("Edge detection approach");
                imageLabel3.setHorizontalTextPosition(JLabel.CENTER);
                imageLabel3.setVerticalTextPosition(JLabel.BOTTOM);
                // delay so frames are played back at a steady rate
                TimeUnit.MILLISECONDS.sleep(initFrames);
            } else {
                System.err.println(" -- Frame not captured -- Break!");
                break;
            }
        }
    } else {
        System.err.println("Couldn't open capture.");
    }
}