当前位置: 首页>>代码示例>>Java>>正文


Java ColorSpace类代码示例

本文整理汇总了Java中org.jcodec.common.model.ColorSpace的典型用法代码示例。如果您正苦于以下问题:Java ColorSpace类的具体用法?Java ColorSpace怎么用?Java ColorSpace使用的例子?那么，这里精选的类代码示例或许可以为您提供帮助。


ColorSpace类属于org.jcodec.common.model包,在下文中一共展示了ColorSpace类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: transcode

import org.jcodec.common.model.ColorSpace; //导入依赖的package包/类
/**
 * Re-encodes the clip's tail packets after priming the decoder with the head
 * packets, so inter-predicted frames in the tail have their references.
 *
 * @return one encoded-and-processed buffer per tail packet
 * @throws IOException if reading packet data fails
 */
public List<ByteBuffer> transcode() throws IOException {
    H264Decoder decoder = new H264Decoder();
    decoder.addSps(avcC.getSpsList());
    decoder.addPps(avcC.getPpsList());

    // Decode target sized to whole macroblocks (16 luma samples per MB).
    Picture buf = Picture.create(mbW << 4, mbH << 4, ColorSpace.YUV420);

    // Warm-up pass: decode leading packets purely to build reference frames.
    Frame dec = null;
    for (VirtualPacket warmUp : head) {
        dec = decoder.decodeFrame(H264Utils.splitMOVPacket(warmUp.getData(), avcC), buf.getData());
    }

    H264Encoder encoder = new H264Encoder(rc);
    ByteBuffer scratch = ByteBuffer.allocate(frameSize);

    List<ByteBuffer> transcoded = new ArrayList<ByteBuffer>();
    for (VirtualPacket packet : tail) {
        dec = decoder.decodeFrame(H264Utils.splitMOVPacket(packet.getData(), avcC), buf.getData());

        // Reuse the scratch buffer for each encode; copy the processed
        // result into a fresh per-frame buffer.
        scratch.clear();
        ByteBuffer encoded = encoder.encodeFrame(dec, scratch);
        ByteBuffer out = ByteBuffer.allocate(frameSize);
        processFrame(encoded, out);

        transcoded.add(out);
    }

    return transcoded;
}
 
开发者ID:PenoaksDev,项目名称:OpenSpaceDVR,代码行数:27,代码来源:AVCClipTrack.java

示例2: SequenceEncoderMp4

import org.jcodec.common.model.ColorSpace; //导入依赖的package包/类
/**
 * Creates an MP4 sequence encoder writing to the given file.
 *
 * @param out destination MP4 file
 * @throws IOException if the file channel cannot be opened
 */
public SequenceEncoderMp4(File out)
        throws IOException
{
    super(out);
    this.ch = NIOUtils.writableFileChannel(out);

    // MP4 container that receives the encoded samples; video track runs at
    // a fixed timescale of 5 units per second.
    muxer = new MP4Muxer(ch, Brand.MP4);
    outTrack = muxer.addTrack(TrackType.VIDEO, 5);

    // H.264 encoder plus a converter from RGB into whatever color space
    // the encoder prefers.
    encoder = new H264Encoder();
    transform = ColorUtil.getTransform(ColorSpace.RGB, encoder.getSupportedColorSpaces()[0]);

    // Reusable output buffer, sized generously for a worst-case 1080p frame.
    _out = ByteBuffer.allocate(1920 * 1080 * 6);

    // Codec-private data (SPS/PPS) collected here ends up in the MP4 header.
    spsList = new ArrayList<ByteBuffer>();
    ppsList = new ArrayList<ByteBuffer>();
}
 
开发者ID:hiliving,项目名称:P2Video-master,代码行数:27,代码来源:SequenceEncoderMp4.java

示例3: SequenceEncoderMp4

import org.jcodec.common.model.ColorSpace; //导入依赖的package包/类
/**
 * Creates an MP4 sequence encoder writing to the given file.
 *
 * @param out destination MP4 file
 * @throws IOException if the file channel cannot be opened
 */
public SequenceEncoderMp4(File out)
        throws IOException
{
    super(out);
    this.ch = NIOUtils.writableFileChannel(out);

    // MP4 container that receives the encoded samples; the video track uses
    // the instance's configured timescale.
    muxer = new MP4Muxer(ch, Brand.MP4);
    outTrack = muxer.addTrack(TrackType.VIDEO, timeScale);

    // H.264 encoder plus a converter from RGB into whatever color space
    // the encoder prefers.
    encoder = new H264Encoder();
    transform = ColorUtil.getTransform(ColorSpace.RGB, encoder.getSupportedColorSpaces()[0]);

    // Reusable output buffer, sized generously for a worst-case 1080p frame.
    _out = ByteBuffer.allocate(1920 * 1080 * 6);

    // Codec-private data (SPS/PPS) collected here ends up in the MP4 header.
    spsList = new ArrayList<ByteBuffer>();
    ppsList = new ArrayList<ByteBuffer>();
}
 
开发者ID:ynztlxdeai,项目名称:ImageToVideo,代码行数:27,代码来源:SequenceEncoderMp4.java

示例4: toColorArray

import org.jcodec.common.model.ColorSpace; //导入依赖的package包/类
/**
 * Flattens a picture into an array of packed int colors, converting to RGB
 * first if the source uses another color space.
 *
 * @param src source picture (any color space)
 * @return one packed color per cropped pixel, in raster order
 */
public static int[] toColorArray(Picture src) {
	// Normalize to RGB so plane 0 holds interleaved color samples.
	if (src.getColor() != ColorSpace.RGB) {
		Transform toRgb = ColorUtil.getTransform(src.getColor(), ColorSpace.RGB);
		Picture rgbPic = Picture.create(src.getWidth(), src.getHeight(), ColorSpace.RGB, src.getCrop());
		toRgb.transform(src, rgbPic);
		src = rgbPic;
	}

	int pixelCount = src.getCroppedWidth() * src.getCroppedHeight();
	int[] colors = new int[pixelCount];
	int[] interleaved = src.getPlaneData(0);

	// Pack each sample triple into a single int; note the reversed order
	// of the three components within each triple.
	for (int px = 0; px < pixelCount; ++px) {
		colors[px] = ReadableRGBContainer.toIntColor(
				interleaved[3 * px + 2], interleaved[3 * px + 1], interleaved[3 * px]);
	}

	return colors;
}
 
开发者ID:vitrivr,项目名称:cineast,代码行数:19,代码来源:PictureUtil.java

示例5: open

import org.jcodec.common.model.ColorSpace; //导入依赖的package包/类
@Override
public void open(String _path, int width, int _height, int _fps) throws IOException {
    path = _path;
    height = _height;
    fps = _fps;

    // Open (or truncate) the destination file for writing.
    ch = new FileChannelWrapper(FileChannel.open(Paths.get(path),
            StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING));

    // MP4 container with a single video track at the requested frame rate.
    muxer = new MP4Muxer(ch, Brand.MP4);
    outTrack = muxer.addTrack(TrackType.VIDEO, fps);

    // Direct output buffer, sized generously for a worst-case frame.
    _out = ByteBuffer.allocateDirect(width * height * 6);

    // Encoder with a fixed rate control setting of 20.
    encoder = new H264Encoder(new JCodecUtils.JHVRateControl(20));

    // SPS/PPS collected during encoding, stored in the MP4 header later.
    spsList = new ArrayList<>();
    ppsList = new ArrayList<>();

    // Reusable conversion target for incoming frames.
    toEncode = Picture.create(width, height, ColorSpace.YUV420J);
}
 
开发者ID:Helioviewer-Project,项目名称:JHelioviewer-SWHV,代码行数:21,代码来源:JCodecExporter.java

示例6: makeFrame

import org.jcodec.common.model.ColorSpace; //导入依赖的package包/类
/**
 * Converts a packed-int BufferedImage into a YUV420 Picture.
 * Assumes the raster's data buffer holds one packed 0xRRGGBB element per
 * pixel — TODO confirm the image type used by callers.
 *
 * @param bi source image
 * @return YUV420 picture of identical dimensions
 */
private Picture makeFrame(BufferedImage bi) {
    DataBuffer pixels = bi.getRaster().getDataBuffer();
    int sampleCount = pixels.getSize();
    int[] luma = new int[sampleCount];
    int[] chromaU = new int[sampleCount >> 2];
    int[] chromaV = new int[sampleCount >> 2];

    int srcIdx = 0;
    int chromaIdx = 0;
    for (int row = 0; row < bi.getHeight(); row++) {
        for (int col = 0; col < bi.getWidth(); col++) {
            int packed = pixels.getElem(srcIdx);
            int r = (packed >>> 16) & 0x0ff;
            int g = (packed >>> 8) & 0x0ff;
            int b = packed & 0x0ff;

            // Integer-approximated RGB -> YCbCr (studio-range offsets 16/128).
            luma[srcIdx] = ((66 * r + 129 * g + 25 * b) >> 8) + 16;

            // 4:2:0 subsampling: one chroma sample per 2x2 block, taken at
            // the odd-row/odd-column pixel.
            if ((col % 2 != 0) && (row % 2 != 0)) {
                chromaU[chromaIdx] = ((-38 * r + -74 * g + 112 * b) >> 8) + 128;
                chromaV[chromaIdx] = ((112 * r + -94 * g + -18 * b) >> 8) + 128;
                chromaIdx++;
            }
            srcIdx++;
        }
    }

    int[][] planes = {luma, chromaU, chromaV, null};
    return new Picture(bi.getWidth(), bi.getHeight(), planes, ColorSpace.YUV420);
}
 
开发者ID:kamil-karkus,项目名称:EasySnap,代码行数:26,代码来源:Encoder.java

示例7: encodeImage

import org.jcodec.common.model.ColorSpace; //导入依赖的package包/类
/**
 * Encodes one AWT image as an H.264 frame and appends it to the video track.
 *
 * @param bi frame to encode; its dimensions fix the picture size on first call
 * @throws IOException if muxing the packet fails
 */
public void encodeImage(BufferedImage bi) throws IOException {
    // Lazily allocate the YUV working picture from the first frame's size.
    if (toEncode == null) {
        toEncode = Picture.create(bi.getWidth(), bi.getHeight(), ColorSpace.YUV420);
    }

    // Zero all three planes, then convert the image into them.
    for (int plane = 0; plane < 3; plane++) {
        Arrays.fill(toEncode.getData()[plane], 0);
    }
    transform.transform(AWTUtil.fromBufferedImage(bi), toEncode);

    // Encode into the shared '_out' buffer.
    _out.clear();
    ByteBuffer encoded = encoder.encodeFrame(_out, toEncode);

    // Extract SPS/PPS and rewrite the stream into MOV/MP4 packet layout.
    spsList.clear();
    ppsList.clear();
    H264Utils.encodeMOVPacket(encoded, spsList, ppsList);

    // Append to the track: timescale 25, one tick per frame, marked as key.
    outTrack.addFrame(new MP4Packet(encoded, frameNo, 25, 1, frameNo, true, null, frameNo, 0));

    frameNo++;
}
 
开发者ID:deepakpk009,项目名称:JScreenRecorder,代码行数:26,代码来源:SequenceEncoder.java

示例8: decodeScan

import org.jcodec.common.model.ColorSpace; //导入依赖的package包/类
/**
 * Decodes a single JPEG scan into a planar picture backed by the supplied
 * plane arrays.
 *
 * @param data       the scan's entropy-coded bitstream
 * @param header     frame header carrying image size and sampling factors
 * @param scan       scan header (currently unused in this method body)
 * @param huffTables Huffman tables used for entropy decoding
 * @param quant      quantization tables
 * @param data2      pre-allocated plane storage the result wraps
 * @param field      field selector forwarded to MCU decoding
 * @param step       row step forwarded to MCU decoding
 * @return decoded picture, MCU-aligned, cropped to the declared image size
 */
private Picture decodeScan(ByteBuffer data, FrameHeader header, ScanHeader scan, VLC[] huffTables, int[][] quant,
        int[][] data2, int field, int step) {
    // Max sampling factors = 8x8 blocks per MCU in each direction.
    int blockW = header.getHmax();
    int blockH = header.getVmax();
    // MCU pixel size: each block covers 8 pixels.
    int mcuW = blockW << 3;
    int mcuH = blockH << 3;

    int width = header.width;
    int height = header.height;

    // Ceil-divide the image size by the MCU size.
    // NOTE(review): ">> (blockW + 2)" equals division by mcuW (= blockW << 3)
    // only when blockW is 1 or 2; presumably sampling factors are restricted
    // to those values here — confirm against the frame-header parser.
    int xBlocks = (width + mcuW - 1) >> (blockW + 2);
    int yBlocks = (height + mcuH - 1) >> (blockH + 2);

    // blockW+blockH selects the chroma layout: 4 -> 4:2:0, 3 -> 4:2:2,
    // anything else -> 4:4:4 (all full-range JPEG variants).
    int nn = blockW + blockH;
    Picture result = new Picture(xBlocks << (blockW + 2), yBlocks << (blockH + 2), data2,
            nn == 4 ? ColorSpace.YUV420J : (nn == 3 ? ColorSpace.YUV422J : ColorSpace.YUV444J), new Rect(0, 0,
                    width, height));

    BitReader bits = new BitReader(data);
    // Per-component DC predictors, seeded at 1024 (mid-range for the
    // decoder's sample representation).
    int[] dcPredictor = new int[] { 1024, 1024, 1024 };
    // Decode MCUs in raster order; stop a row early if the bitstream runs dry.
    for (int by = 0; by < yBlocks; by++)
        for (int bx = 0; bx < xBlocks && bits.moreData(); bx++)
            decodeMCU(bits, dcPredictor, quant, huffTables, result, bx, by, blockW, blockH, field, step);

    return result;
}
 
开发者ID:PenoaksDev,项目名称:OpenSpaceDVR,代码行数:27,代码来源:JpegDecoder.java

示例9: initSPS

import org.jcodec.common.model.ColorSpace; //导入依赖的package包/类
/**
 * Builds a sequence parameter set for the given picture size.
 *
 * @param sz desired picture dimensions in pixels
 * @return SPS with macroblock-aligned size and cropping offsets set
 */
public SeqParameterSet initSPS(Size sz) {
    SeqParameterSet sps = new SeqParameterSet();

    // Macroblock grid covering the requested size, rounded up to 16-pixel units.
    sps.pic_width_in_mbs_minus1 = ((sz.getWidth() + 15) >> 4) - 1;
    sps.pic_height_in_map_units_minus1 = ((sz.getHeight() + 15) >> 4) - 1;

    sps.chroma_format_idc = ColorSpace.YUV420;
    sps.profile_idc = 66; // Baseline profile
    sps.level_idc = 40;   // Level 4.0
    sps.frame_mbs_only_flag = true;

    // If macroblock alignment padded the picture, crop the excess back off.
    int codedWidth = (sps.pic_width_in_mbs_minus1 + 1) << 4;
    int codedHeight = (sps.pic_height_in_map_units_minus1 + 1) << 4;
    boolean padded = codedWidth != sz.getWidth() || codedHeight != sz.getHeight();
    sps.frame_cropping_flag = padded;
    sps.frame_crop_right_offset = (codedWidth - sz.getWidth() + 1) >> 1;
    sps.frame_crop_bottom_offset = (codedHeight - sz.getHeight() + 1) >> 1;

    return sps;
}
 
开发者ID:PenoaksDev,项目名称:OpenSpaceDVR,代码行数:18,代码来源:H264Encoder.java

示例10: deblockFrame

import org.jcodec.common.model.ColorSpace; //导入依赖的package包/类
/**
 * Applies the in-loop deblocking filter to every macroblock of the frame.
 *
 * @param result decoded picture, filtered in place
 */
public void deblockFrame(Picture result) {
        ColorSpace color = result.getColor();
        // Boundary-strength scratch arrays, recomputed per macroblock.
        int[][] bsVertical = new int[4][4];
        int[][] bsHorizontal = new int[4][4];

        for (int mbAddr = 0; mbAddr < shs.length; mbAddr++) {
            // Compute strengths once per macroblock, then filter every
            // color component with them.
            calcBsH(result, mbAddr, bsHorizontal);
            calcBsV(result, mbAddr, bsVertical);
            for (int comp = 0; comp < color.nComp; comp++) {
                fillVerticalEdge(result, comp, mbAddr, bsVertical);
                fillHorizontalEdge(result, comp, mbAddr, bsHorizontal);
            }
        }
    }
 
开发者ID:PenoaksDev,项目名称:OpenSpaceDVR,代码行数:22,代码来源:DeblockingFilter.java

示例11: encodeFrame

import org.jcodec.common.model.ColorSpace; //导入依赖的package包/类
/**
 * Serializes an RGB picture as a binary PPM (P6) image.
 *
 * @param picture source; must be in the RGB color space
 * @return flipped buffer containing the complete PPM file
 * @throws IllegalArgumentException if the picture is not RGB
 */
public ByteBuffer encodeFrame(Picture picture) {
    if (picture.getColor() != ColorSpace.RGB)
        throw new IllegalArgumentException("Only RGB image can be stored in PPM");

    int sampleCount = picture.getWidth() * picture.getHeight() * 3;
    // Extra 200 bytes leaves room for the text header.
    ByteBuffer buffer = ByteBuffer.allocate(sampleCount + 200);

    // P6 header: magic, dimensions, max sample value.
    buffer.put(JCodecUtil.asciiString("P6 " + picture.getWidth() + " " + picture.getHeight() + " 255\n"));

    // Plane 0 holds interleaved samples; each triple is emitted in
    // reverse order.
    int[][] planes = picture.getData();
    for (int i = 0; i < sampleCount; i += 3) {
        buffer.put((byte) planes[0][i + 2]);
        buffer.put((byte) planes[0][i + 1]);
        buffer.put((byte) planes[0][i]);
    }

    buffer.flip();
    return buffer;
}
 
开发者ID:PenoaksDev,项目名称:OpenSpaceDVR,代码行数:18,代码来源:PPMEncoder.java

示例12: nextFrame

import org.jcodec.common.model.ColorSpace; //导入依赖的package包/类
/**
 * Reads the next Y4M frame from the channel.
 *
 * @param buffer caller-supplied plane storage (currently unused in this body)
 * @return decoded YUV420 picture, or null at end of stream / missing marker
 * @throws IOException      if channel access fails
 * @throws RuntimeException if the stream was flagged as an invalid format
 */
public Picture nextFrame(int[][] buffer) throws IOException {
    if (invalidFormat != null)
        throw new RuntimeException("Invalid input: " + invalidFormat);

    // Every Y4M frame is preceded by a "FRAME" marker line.
    long startPos = is.position();
    ByteBuffer headerBuf = NIOUtils.fetchFrom(is, 2048);
    String marker = readLine(headerBuf);
    if (marker == null || !marker.startsWith("FRAME"))
        return null;

    // Memory-map the raw planar samples that follow the marker, then move
    // the channel position past them.
    MappedByteBuffer pix = is.map(MapMode.READ_ONLY, startPos + headerBuf.position(), bufSize);
    is.position(startPos + headerBuf.position() + bufSize);

    // Copy the three planes, in stream order, into a fresh picture.
    Picture picture = Picture.create(width, height, ColorSpace.YUV420);
    copy(pix, picture.getPlaneData(0));
    copy(pix, picture.getPlaneData(1));
    copy(pix, picture.getPlaneData(2));

    return picture;
}
 
开发者ID:PenoaksDev,项目名称:OpenSpaceDVR,代码行数:20,代码来源:Y4MDecoder.java

示例13: SequenceEncoder

import org.jcodec.common.model.ColorSpace; //导入依赖的package包/类
/**
 * Creates an MP4 sequence encoder writing H.264 at 25 fps to the given file.
 *
 * @param out destination MP4 file
 * @throws IOException if the file channel cannot be opened
 */
public SequenceEncoder(File out) throws IOException {
    this.ch = NIOUtils.writableFileChannel(out);

    // MP4 container with one video track at a fixed 25 units/second timescale.
    muxer = new MP4Muxer(ch, Brand.MP4);
    outTrack = muxer.addTrack(TrackType.VIDEO, 25);

    // H.264 encoder plus a converter from RGB into the encoder's preferred
    // color space.
    encoder = new H264Encoder();
    transform = ColorUtil.getTransform(ColorSpace.RGB, encoder.getSupportedColorSpaces()[0]);

    // Reusable output buffer, sized generously for a worst-case 1080p frame.
    _out = ByteBuffer.allocate(1920 * 1080 * 6);

    // Codec-private data (SPS/PPS) collected here ends up in the MP4 header.
    spsList = new ArrayList<ByteBuffer>();
    ppsList = new ArrayList<ByteBuffer>();
}
 
开发者ID:PenoaksDev,项目名称:OpenSpaceDVR,代码行数:25,代码来源:SequenceEncoder.java

示例14: toBufferedImage

import org.jcodec.common.model.ColorSpace; //导入依赖的package包/类
/**
 * Converts a picture (any color space) into a BGR BufferedImage, honoring
 * the picture's crop rectangle when one is set.
 *
 * @param src source picture
 * @return image of the cropped dimensions, TYPE_3BYTE_BGR
 */
public static BufferedImage toBufferedImage(Picture src) {
    // Normalize to RGB first if needed.
    if (src.getColor() != ColorSpace.RGB) {
        Transform toRgb = ColorUtil.getTransform(src.getColor(), ColorSpace.RGB);
        Picture rgbPic = Picture.create(src.getWidth(), src.getHeight(), ColorSpace.RGB, src.getCrop());
        toRgb.transform(src, rgbPic);
        src = rgbPic;
    }

    BufferedImage dst = new BufferedImage(src.getCroppedWidth(), src.getCroppedHeight(),
            BufferedImage.TYPE_3BYTE_BGR);

    // Choose the plain or crop-aware copy path.
    if (src.getCrop() == null) {
        toBufferedImage(src, dst);
    } else {
        toBufferedImageCropped(src, dst);
    }

    return dst;
}
 
开发者ID:PenoaksDev,项目名称:OpenSpaceDVR,代码行数:19,代码来源:AWTUtil.java

示例15: test

import org.jcodec.common.model.ColorSpace; //导入依赖的package包/类
/**
 * Decodes an H.264 elementary stream and compares every decoded frame,
 * plane by plane, against raw reference YUV data.
 *
 * @param coded H.264 Annex-B elementary stream file
 * @param ref   raw planar YUV420 reference file
 * @return true when every frame matches the reference exactly
 * @throws IOException if either file cannot be read
 */
private boolean test(File coded, File ref) throws IOException {
    MappedH264ES es = new MappedH264ES(NIOUtils.fetchFrom(coded));
    // Work buffer large enough for full-HD coded pictures (1088 = 68 MBs).
    Picture workBuf = Picture.create(1920, 1088, ColorSpace.YUV420);
    H264Decoder decoder = new H264Decoder();
    ByteBuffer refYuv = NIOUtils.fetchFrom(ref);

    Packet pkt;
    while ((pkt = es.nextFrame()) != null) {
        // Decode, crop, and copy into a tightly-sized compatible picture.
        Picture cropped = decoder.decodeFrame(pkt.getData(), workBuf.getData()).cropped();
        Picture pic = cropped.createCompatible();
        pic.copyFrom(cropped);

        int lumaSize = pic.getWidth() * pic.getHeight();
        int chromaSize = lumaSize >> 2; // 4:2:0 — each chroma plane is a quarter of luma

        ByteBuffer frameRef = NIOUtils.read(refYuv, lumaSize + 2 * chromaSize);

        // Compare all three planes in stream order; stop on first mismatch.
        if (!Arrays.equals(getAsIntArray(frameRef, lumaSize), pic.getPlaneData(0)))
            return false;
        if (!Arrays.equals(getAsIntArray(frameRef, chromaSize), pic.getPlaneData(1)))
            return false;
        if (!Arrays.equals(getAsIntArray(frameRef, chromaSize), pic.getPlaneData(2)))
            return false;
    }
    return true;
}
 
开发者ID:PenoaksDev,项目名称:OpenSpaceDVR,代码行数:26,代码来源:VerifyTool.java


注:本文中的org.jcodec.common.model.ColorSpace类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。