This article collects typical usage examples of the Java method org.opencv.highgui.Highgui.imencode. If you are unsure what Highgui.imencode does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore further usage of its enclosing class, org.opencv.highgui.Highgui.
The following shows 15 code examples of Highgui.imencode, ordered by popularity by default.
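Before the examples, a minimal sketch of the call itself may be useful (based on the OpenCV 2.4.x Java API, the last series in which org.opencv.highgui.Highgui exists; the input file name is a placeholder). imencode selects the codec from the file extension, writes the encoded bytes into a MatOfByte, and returns true on success; a second overload accepts a MatOfInt of encoder parameters such as JPEG quality.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.core.MatOfInt;
import org.opencv.highgui.Highgui;

public class ImencodeSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME); // load the native OpenCV library first
        Mat img = Highgui.imread("input.png");        // placeholder path
        MatOfByte jpegBytes = new MatOfByte();

        // basic form: the extension picks the codec, the encoded bytes land in jpegBytes
        boolean ok = Highgui.imencode(".jpg", img, jpegBytes);

        // overload with encoder parameters, here JPEG quality (0-100)
        boolean okHq = Highgui.imencode(".jpg", img, jpegBytes,
                new MatOfInt(Highgui.CV_IMWRITE_JPEG_QUALITY, 90));

        System.out.println("encoded: " + ok + " / " + okHq
                + ", byte count: " + jpegBytes.toArray().length);
    }
}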
Example 1: imshow
import org.opencv.highgui.Highgui; // import the package/class this method depends on
/**
* Display an image in a Swing JFrame window.
*
* @param title the window title
* @param img   the image to display
*/
public static void imshow(String title, Mat img) {
// Convert image Mat to a jpeg
MatOfByte imageBytes = new MatOfByte();
Highgui.imencode(".jpg", img, imageBytes);
try {
// Put the jpeg bytes into a JFrame window and show.
JFrame frame = new JFrame(title);
frame.getContentPane().add(new JLabel(new ImageIcon(ImageIO.read(new ByteArrayInputStream(imageBytes.toArray())))));
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
frame.pack();
frame.setVisible(true);
frame.setLocation(30 + (windowNo*20), 30 + (windowNo*20));
windowNo++;
} catch (Exception e) {
e.printStackTrace();
}
}
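A hedged usage sketch for the helper above; the image path is a placeholder and not part of the original example:

// assumed to run in the same class that defines imshow(...)
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
Mat img = Highgui.imread("some_image.jpg"); // placeholder path
if (!img.empty()) {
    imshow("Preview", img);
}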
Example 2: mat2Image
import org.opencv.highgui.Highgui; // import the package/class this method depends on
/**
* Convert a {@link Mat} object (OpenCV) into the corresponding {@link Image}
* for JavaFX
*
* @param frame
* the {@link Mat} representing the current frame
* @return the {@link Image} to show
*/
private Image mat2Image(Mat frame)
{
// create a temporary buffer
MatOfByte buffer = new MatOfByte();
// encode the frame in the buffer, according to the bitmap format
if(Highgui.imencode(".bmp", frame, buffer)){
System.out.println("Highgui is successful");
}
else{
System.out.println("Highgui is unsuccessful");
}
// build and return an Image created from the image encoded in the
// buffer
return new Image(new ByteArrayInputStream(buffer.toArray()));
}
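If this converter is called from a camera-grabbing background thread, the resulting Image usually has to be handed to the JavaFX Application Thread before it can be shown. A minimal sketch, assuming an ImageView field named imageView that is not part of the original example:

// assumed field in the same controller class: private ImageView imageView;
Image fxImage = mat2Image(frame);                      // frame is the current Mat
Platform.runLater(() -> imageView.setImage(fxImage));  // update the UI on the FX thread

Encoding to ".bmp" here is a reasonable choice for per-frame display: BMP is uncompressed, so it is cheap to encode, at the cost of a larger in-memory buffer than JPEG would produce.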
Example 3: toBufferedImage
import org.opencv.highgui.Highgui; // import the package/class this method depends on
public static BufferedImage toBufferedImage(Mat source) throws IOException
{
MatOfByte bytemat = new MatOfByte();
try
{
Highgui.imencode(".jpg", source, bytemat);
byte[] bytes = bytemat.toArray();
try (InputStream in = new ByteArrayInputStream(bytes))
{
BufferedImage img = ImageIO.read(in);
return img;
}
catch (Exception e)
{
throw new IOException("Failed to convert OpenCV matrix to buffered image");
}
}
finally
{
if (bytemat != null)
bytemat.release();
}
}
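Note the finally block: MatOfByte wraps native memory, so releasing it explicitly frees that memory deterministically instead of waiting for the garbage collector. A hedged usage sketch (someMat and the output path are placeholders; ImageIO.write throws IOException):

BufferedImage img = toBufferedImage(someMat);          // someMat: an already populated Mat
ImageIO.write(img, "png", new File("/tmp/out.png"));   // placeholder output path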
Example 4: run
import org.opencv.highgui.Highgui; // import the package/class this method depends on
@Override
public void run() {
synchronized (this) {
while (runnable) {
if (webSource.grab()) {
try {
webSource.retrieve(fm);
Graphics g = cameraPane.getGraphics();
faceDetector.detectMultiScale(fm, faceDetections);
for (Rect rect : faceDetections.toArray()) {
Imgproc.rectangle(fm, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
// rectCrop = new Rect(rect.x, rect.y, rect.width, rect.height);// crop only face
}
Highgui.imencode(".bmp", fm, mem);
Image im = ImageIO.read(new ByteArrayInputStream(mem.toArray()));
BufferedImage buff = (BufferedImage) im;
if (g.drawImage(buff, 0, 0, getWidth(), getHeight() - 150, 0, 0, buff.getWidth(), buff.getHeight(), null)) {
if (runnable == false) {
System.out.println("Paused ..... ");
this.wait();
}
}
} catch (Exception ex) {
ex.toString();
}
}
}
}
}
Developer: javaspecial, Project: Face-detection-and-recognition-desktop-application, Lines: 33, Source: FaceRecognizeFrame.java
Example 5: run
import org.opencv.highgui.Highgui; // import the package/class this method depends on
@Override
public void run() {
synchronized (this) {
while (runnable) {
if (webSource.grab()) {
try {
webSource.retrieve(fm);
Graphics g = cameraPane.getGraphics();
faceDetector.detectMultiScale(fm, faceDetections);
for (Rect rect : faceDetections.toArray()) {
Imgproc.rectangle(fm, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
}
Highgui.imencode(".bmp", fm, mem);
Image im = ImageIO.read(new ByteArrayInputStream(mem.toArray()));
BufferedImage buff = (BufferedImage) im;
if (g.drawImage(buff, 0, 0, getWidth(), getHeight(), 0, 0, buff.getWidth(), buff.getHeight(), null)) {
if (runnable == false) {
System.out.println("Paused ..... ");
this.wait();
}
}
} catch (Exception ex) {
ex.toString();
}
}
}
}
}
Developer: javaspecial, Project: Face-detection-and-recognition-desktop-application, Lines: 31, Source: FaceDetectorSecurityCamera.java
Example 6: run
import org.opencv.highgui.Highgui; // import the package/class this method depends on
@Override
public void run() {
synchronized (this) {
while (runnable) {
if (webSource.grab()) {
try {
webSource.retrieve(fm);
Graphics g = cameraPane.getGraphics();
faceDetector.detectMultiScale(fm, faceDetections);
for (Rect rect : faceDetections.toArray()) {
Imgproc.rectangle(fm, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
rectCrop = new Rect(rect.x, rect.y, rect.width, rect.height);// crop only face
}
Highgui.imencode(".bmp", fm, mem);
Image im = ImageIO.read(new ByteArrayInputStream(mem.toArray()));
BufferedImage buff = (BufferedImage) im;
if (g.drawImage(buff, 0, 0, getWidth(), getHeight() - 150, 0, 0, buff.getWidth(), buff.getHeight(), null)) {
if (runnable == false) {
System.out.println("Paused ..... ");
this.wait();
}
}
} catch (Exception ex) {
ex.toString();
}
}
}
}
}
Developer: javaspecial, Project: Face-detection-and-recognition-desktop-application, Lines: 34, Source: FaceDetectCropTest.java
Example 7: run
import org.opencv.highgui.Highgui; // import the package/class this method depends on
@Override
public void run() {
synchronized (this) {
while (runnable) {
if (webSource.grab()) {
try {
webSource.retrieve(fm);
Graphics g = jPanel1.getGraphics();
faceDetector.detectMultiScale(fm, faceDetections);
for (Rect rect : faceDetections.toArray()) {
Imgproc.rectangle(fm, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
}
Highgui.imencode(".bmp", fm, mem);
Image im = ImageIO.read(new ByteArrayInputStream(mem.toArray()));
BufferedImage buff = (BufferedImage) im;
if (g.drawImage(buff, 0, 0, getWidth(), getHeight(), 0, 0, buff.getWidth(), buff.getHeight(), null)) {
if (runnable == false) {
System.out.println("Paused ..... ");
this.wait();
}
}
} catch (Exception ex) {
System.out.println("Error");
}
}
}
}
}
Example 8: run
import org.opencv.highgui.Highgui; // import the package/class this method depends on
@Override
public void run()
{
synchronized(this)
{
while(runnable)
{
if(webSource.grab())
{
try
{
webSource.retrieve(frame);
Highgui.imencode(".bmp", frame, mem);
Image im = ImageIO.read(new ByteArrayInputStream(mem.toArray()));
BufferedImage buff = (BufferedImage) im;
Graphics g=CamWeb.getGraphics();
if (g.drawImage(buff, 0, 0, getWidth(), getHeight() -150 , 0, 0, buff.getWidth(), buff.getHeight(), null))
if(runnable == false)
{
System.out.println("Going to wait()");
this.wait();
}
}
catch(Exception ex)
{
System.out.println();
}
}
}
}
}
Example 9: writeOcrResult
import org.opencv.highgui.Highgui; // import the package/class this method depends on
private static void writeOcrResult(DigitalType digitalType, DigitalTypeClass digitalTypeClass) throws IOException {
File ocrFile = buildOcrFileName(digitalType, digitalTypeClass);
Mat mat = OpenCvImage.getMatFromSimDocBinaryDat(digitalType.getBytes());
MatOfByte matOfByte = new MatOfByte();
Highgui.imencode(".png", mat, matOfByte, new MatOfInt(Highgui.CV_IMWRITE_PNG_COMPRESSION, 0));
FileUtils.writeByteArrayToFile(ocrFile, matOfByte.toArray());
}
Example 10: exportDigitalType
import org.opencv.highgui.Highgui; // import the package/class this method depends on
private static void exportDigitalType(DigitalType digitalType) throws IOException {
Mat mat = OpenCvImage.rawToMat(digitalType.getBytes());
MatOfByte matOfByte = new MatOfByte();
Highgui.imencode(".png", mat, matOfByte, new MatOfInt(Highgui.CV_IMWRITE_PNG_COMPRESSION, 0));
String outImageName = String.format("%s/%d_%d.png", DEBUG_OUT_DIR, digitalType.y(), digitalType.x());
String outRawImageName = String.format("%s/%d_%d.raw", DEBUG_OUT_DIR, digitalType.y(), digitalType.x());
FileUtils.writeByteArrayToFile(new File(outImageName), matOfByte.toArray());
FileUtils.writeByteArrayToFile(new File(outRawImageName), digitalType.getBytes());
}
Example 11: writeImage
import org.opencv.highgui.Highgui; // import the package/class this method depends on
private void writeImage(DigitalType digitalType) throws IOException {
Mat mat = OpenCvImage.rawToMat(digitalType.getBytes());
MatOfByte matOfByte = new MatOfByte();
Highgui.imencode(".png", mat, matOfByte, new MatOfInt(Highgui.CV_IMWRITE_PNG_COMPRESSION, 0));
String outImageName = String.format("%s/%d_%d.png", imagesDir, digitalType.y(), digitalType.x());
FileUtils.writeByteArrayToFile(new File(outImageName), matOfByte.toArray());
}
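Examples 9 to 11 all use the four-argument overload, passing CV_IMWRITE_PNG_COMPRESSION with level 0 (the fastest, lowest-compression PNG setting). The same overload can trade file size against quality for JPEG output; a minimal sketch, assuming an already populated Mat named mat:

MatOfByte jpegBuf = new MatOfByte();
// JPEG quality runs from 0 to 100; 90 is a common near-lossless setting
Highgui.imencode(".jpg", mat, jpegBuf,
        new MatOfInt(Highgui.CV_IMWRITE_JPEG_QUALITY, 90));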
Example 12: run
import org.opencv.highgui.Highgui; // import the package/class this method depends on
public void run() {
synchronized (this) {
while (runnable) {
if (webSource.grab()) {
try {
webSource.retrieve(frame);
Graphics g = panel.getGraphics();
faceDetector.detectMultiScale(frame, faceDetections);
for (Rect rect : faceDetections.toArray()) {
Core.rectangle(frame, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height),
new Scalar(0, 255,0));
int xVal = rect.x;
if(xVal > 250){
System.out.println("left");
PongCourt.ball.left();
PongCourt.ball.setVelocity(-6, PongCourt.ball.velocityY);
}
if(xVal < 250){
System.out.println("right");
PongCourt.ball.setVelocity(6, PongCourt.ball.velocityY);
PongCourt.ball.right();
}
}
Highgui.imencode(".bmp", frame, mem);
Image im = ImageIO.read(new ByteArrayInputStream(mem.toArray()));
BufferedImage buff = (BufferedImage) im;
if (g.drawImage(buff, 0, 0, getWidth(), getHeight()-150 , 0, 0, buff.getWidth(), buff.getHeight(), null)) {
if (runnable == false) {
System.out.println("Paused ..... ");
this.wait();
}
}
} catch (Exception ex) {
System.out.println("Error");
}
}
}
}
}
Example 13: paint
import org.opencv.highgui.Highgui; // import the package/class this method depends on
@Override
public void paint(Graphics g) {
webSource.open(0);
timer.start();
webSource.retrieve(frame);
Mat frame2 = frame.clone();
Size sz = new Size(200, 200);
Imgproc.resize(frame2, frame, sz);
faceDetector.detectMultiScale(frame, faceDetections);
for (Rect rect : faceDetections.toArray()) {
Core.rectangle(frame, new org.opencv.core.Point(rect.x, rect.y),
new org.opencv.core.Point(rect.x + rect.width, rect.y + rect.height),
new Scalar(0, 255, 0));
}
Highgui.imencode(".bmp", frame, mem);
Image im = null;
try {
im = ImageIO.read(new ByteArrayInputStream(mem.toArray()));
} catch (IOException e) {
e.printStackTrace();
}
BufferedImage buff = (BufferedImage) im;
if(g.drawImage(buff, 0, 0, getWidth(), getHeight()-150 , 0, 0, buff.getWidth(),
buff.getHeight(), null)){
System.out.println("Success");
}else{
System.out.println("problem");
}
}
Example 14: setCompressedImage
import org.opencv.highgui.Highgui; // import the package/class this method depends on
private void setCompressedImage(Mat inputFrame, tms_ss_rcnn.obj_detectionRequest request) throws IOException {
Time time = mConnectedNode.getCurrentTime();
request.getImage().getHeader().setStamp(time);
request.getImage().getHeader().setFrameId("ObjectDetection");
request.getImage().getHeader().setSeq(sequenceNumber++);
request.getImage().setFormat("jpg");
MatOfByte buf = new MatOfByte();
Highgui.imencode(".jpg", inputFrame, buf);
ChannelBufferOutputStream stream = new ChannelBufferOutputStream(MessageBuffers.dynamicBuffer());
stream.write(buf.toArray());
request.getImage().setData(stream.buffer().copy());
}
Example 15: matToBufferedImage
import org.opencv.highgui.Highgui; // import the package/class this method depends on
/**
* Convert an OpenCV Mat to an AWT BufferedImage
* @param mat the OpenCV matrix
* @return the equivalent BufferedImage
*/
private BufferedImage matToBufferedImage(Mat mat) {
MatOfByte bytemat = new MatOfByte();
Highgui.imencode(".jpg", mat, bytemat);
byte[] bytes = bytemat.toArray();
BufferedImage im = null;
try {
im = ImageIO.read(new ByteArrayInputStream(bytes));
} catch (IOException ex) {
Logger.getLogger(AcquireSampleImages.class.getName()).log(Level.SEVERE, null, ex);
}
return im;
}
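All of the examples above use the OpenCV 2.4.x Java bindings. From OpenCV 3.0 onward the Highgui Java class was removed and imencode lives in org.opencv.imgcodecs.Imgcodecs instead (the encoder-parameter constants also moved, so check the Imgcodecs class of your release). A minimal migration sketch:

import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.imgcodecs.Imgcodecs;

public class ImencodeMigration {
    // OpenCV 3.x/4.x equivalent of Highgui.imencode(".jpg", mat, buf)
    public static byte[] toJpegBytes(Mat mat) {
        MatOfByte buf = new MatOfByte();
        Imgcodecs.imencode(".jpg", mat, buf);
        return buf.toArray();
    }
}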