本文整理汇总了 C++ 中 VideoFrame::bits 方法的典型用法代码示例。如果您正在寻找以下问题的答案：C++ 中 VideoFrame::bits 方法的具体用法？VideoFrame::bits 怎么用？有哪些使用 VideoFrame::bits 的例子？那么，这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类 VideoFrame 的用法示例。
在下文中一共展示了 VideoFrame::bits 方法的 5 个代码示例，这些例子默认按受欢迎程度排序。您可以为喜欢或者觉得有用的代码点赞，您的评价将有助于系统推荐出更好的 C++ 代码示例。
示例1: push
// Feed one video frame into the libavfilter graph.
// Rebuilds the graph (setup()) whenever the incoming frame's geometry or
// pixel format differs from the cached values, or when options changed.
// Returns false on setup or buffersrc failure.
bool LibAVFilterPrivate::push(Frame *frame, qreal pts)
{
    VideoFrame *video = static_cast<VideoFrame*>(frame);
    const bool reconfigure = options_changed
            || width != video->width()
            || height != video->height()
            || pixfmt != video->pixelFormatFFmpeg();
    if (reconfigure) {
        // Cache the new source parameters before (re)building the graph.
        width = video->width();
        height = video->height();
        pixfmt = (AVPixelFormat)video->pixelFormatFFmpeg();
        options_changed = false;
        if (!setup()) {
            qWarning("setup filter graph error");
            enabled = false; // skip this filter and avoid crash
            return false;
        }
    }
    Q_ASSERT(avframe);
    // Source buffer time_base is 1/1000000, so pts seconds -> microseconds.
    avframe->pts = pts * 1000000.0;
    avframe->width = video->width();
    avframe->height = video->height();
    avframe->format = pixfmt = (AVPixelFormat)video->pixelFormatFFmpeg();
    // Wire the AVFrame plane pointers/strides directly to the frame's data.
    const int nb_planes = video->planeCount();
    for (int plane = 0; plane < nb_planes; ++plane) {
        avframe->data[plane] = video->bits(plane);
        avframe->linesize[plane] = video->bytesPerLine(plane);
    }
    // KEEP_REF: the filter graph keeps its own reference; our frame stays valid.
    const int err = av_buffersrc_add_frame_flags(in_filter_ctx, avframe, AV_BUFFERSRC_FLAG_KEEP_REF);
    if (err != 0) {
        qWarning("av_buffersrc_add_frame error: %s", av_err2str(err));
        return false;
    }
    return true;
}
示例2: convert
// Convert a frame to the given FFmpeg pixel format using a (lazily created)
// sws-based converter, applying the stored brightness/contrast/saturation.
// Hardware-surface frames (no host data) are delegated to VideoFrame::to().
// Returns an invalid VideoFrame on failure.
VideoFrame VideoFrameConverter::convert(const VideoFrame &frame, int fffmt) const
{
    if (!frame.isValid() || fffmt == QTAV_PIX_FMT_C(NONE))
        return VideoFrame();
    if (!frame.bits(0)) // hw surface
        return frame.to(VideoFormat::pixelFormatFromFFmpeg(fffmt));
    const VideoFormat format(frame.format());
    //if (fffmt == format.pixelFormatFFmpeg())
    //    return *this;
    if (!m_cvt)
        m_cvt = new ImageConverterSWS();
    // Color equalization parameters stored on this converter.
    m_cvt->setBrightness(m_eq[0]);
    m_cvt->setContrast(m_eq[1]);
    m_cvt->setSaturation(m_eq[2]);
    m_cvt->setInFormat(format.pixelFormatFFmpeg());
    m_cvt->setOutFormat(fffmt);
    // Only a format conversion: geometry is unchanged.
    m_cvt->setInSize(frame.width(), frame.height());
    m_cvt->setOutSize(frame.width(), frame.height());
    // Gather per-plane source pointers and strides.
    const int nb_planes = format.planeCount();
    QVector<const uchar*> src_planes(nb_planes);
    QVector<int> src_strides(nb_planes);
    for (int plane = 0; plane < nb_planes; ++plane) {
        src_planes[plane] = frame.bits(plane);
        src_strides[plane] = frame.bytesPerLine(plane);
    }
    if (!m_cvt->convert(src_planes.constData(), src_strides.constData()))
        return VideoFrame();
    // Wrap the converter's output buffer in a new frame.
    const VideoFormat fmt(fffmt);
    VideoFrame out(m_cvt->outData(), frame.width(), frame.height(), fmt);
    out.setBits(m_cvt->outPlanes());
    out.setBytesPerLine(m_cvt->outLineSizes());
    out.setTimestamp(frame.timestamp());
    // metadata?
    if (fmt.isRGB())
        out.setColorSpace(fmt.isPlanar() ? ColorSpace_GBR : ColorSpace_RGB);
    else
        out.setColorSpace(ColorSpace_Unknow);
    return out;
}
示例3: encode
// Encode one video frame (or flush the encoder when `frame` is invalid:
// avcodec_encode_video2 is then called with a NULL AVFrame to drain
// delayed packets). On success the resulting packet is stored in d.packet.
// Returns false when no packet was produced or on encoder error.
bool VideoEncoderFFmpeg::encode(const VideoFrame &frame)
{
    DPTR_D(VideoEncoderFFmpeg);
    AVFrame *f = NULL;
    if (frame.isValid()) {
        f = av_frame_alloc();
        f->format = frame.format().pixelFormatFFmpeg();
        f->width = frame.width();
        f->height = frame.height();
        // TODO: record last pts
        // pts in frame-rate units; final pts is set in the muxer.
        f->pts = int64_t(frame.timestamp()*frameRate());
        // Point the AVFrame planes at the source frame's data (no copy).
        const int nb_planes = frame.planeCount();
        for (int i = 0; i < nb_planes; ++i) {
            f->linesize[i] = frame.bytesPerLine(i);
            f->data[i] = (uint8_t*)frame.bits(i);
        }
        // Lazily adopt the first frame's geometry if the codec context
        // was opened without explicit dimensions.
        if (d.avctx->width <= 0) {
            d.avctx->width = frame.width();
        }
        if (d.avctx->height <= 0) {
            d.avctx->height = frame.height(); // was frame.width(): wrong dimension
        }
    }
    AVPacket pkt;
    av_init_packet(&pkt);
    // Use the preallocated encode buffer so the encoder does not allocate.
    pkt.data = (uint8_t*)d.buffer.constData();
    pkt.size = d.buffer.size();
    int got_packet = 0;
    int ret = avcodec_encode_video2(d.avctx, &pkt, f, &got_packet);
    av_frame_free(&f); // safe on NULL (flush path)
    if (ret < 0) {
        //qWarning("error avcodec_encode_video2: %s" ,av_err2str(ret));
        return false; //false
    }
    if (!got_packet) {
        qWarning("no packet got");
        return false; //false
    }
    qDebug("enc avpkt.pts: %lld, dts: %lld.", pkt.pts, pkt.dts);
    d.packet = Packet::fromAVPacket(&pkt, av_q2d(d.avctx->time_base));
    qDebug("enc packet.pts: %.3f, dts: %.3f.", d.packet.pts, d.packet.dts);
    return true;
}
示例4: QPainter
// Prepare the QPainter/paint device for this filter pass.
// With no frame: (re)use or create a painter on the existing paint device.
// With a frame: tear down the old device and wrap the frame's pixel buffer
// in a QImage so painting writes directly into the frame data.
void QPainterFilterContext::initializeOnFrame(Frame *frame)
{
    if (!frame) {
        if (!painter) {
            painter = new QPainter(); //warning: more than 1 painter on 1 device
        }
        if (!paint_device) {
            // Fall back to whatever device the painter is already bound to.
            paint_device = painter->device();
        }
        if (!paint_device && !painter->isActive()) {
            qWarning("No paint device and painter is not active. No painting!");
            return;
        }
        if (!painter->isActive())
            painter->begin(paint_device);
        return;
    }
    VideoFrame *vframe = static_cast<VideoFrame*>(frame);
    VideoFormat format = vframe->format();
    if (!format.isValid()) {
        qWarning("Not a valid format");
        return;
    }
    // QImage cannot address this pixel layout directly; convert the frame
    // in place to RGB32 so it can back a QImage.
    if (format.imageFormat() == QImage::Format_Invalid) {
        format.setPixelFormat(VideoFormat::Format_RGB32);
        vframe->convertTo(format);
    }
    // Replace the previous device: end the painter first, because
    if (paint_device) {
        if (painter && painter->isActive()) {
            painter->end(); //destroy a paint device that is being painted is not allowed!
        }
        delete paint_device;
        paint_device = 0;
    }
    Q_ASSERT(video_width > 0 && video_height > 0);
    // direct draw on frame data, so use VideoFrame::bits()
    paint_device = new QImage((uchar*)vframe->bits(0), video_width, video_height, vframe->bytesPerLine(0), format.imageFormat());
    if (!painter)
        painter = new QPainter();
    own_painter = true;
    own_paint_device = true; //TODO: what about renderer is not a widget?
    painter->begin((QImage*)paint_device);
}
示例5: qWarning
bool Direct2DRenderer::receiveFrame(const VideoFrame& frame)
{
DPTR_D(Direct2DRenderer);
if (!d.prepareBitmap(frame.width(), frame.height()))
return false;
HRESULT hr = S_OK;
//if d2d factory is D2D1_FACTORY_TYPE_SINGLE_THREADED, we need to lock
//QMutexLocker locker(&d.img_mutex);
//Q_UNUSED(locker);
d.video_frame = frame;
//TODO: if CopyFromMemory() is deep copy, mutex can be avoided
/*if lock is required, do not use locker in if() scope, it will unlock outside the scope*/
//TODO: d2d often crash, should we always lock? How about other renderer?
hr = d.bitmap->CopyFromMemory(NULL //&D2D1::RectU(0, 0, image.width(), image.height()) /*&dstRect, NULL?*/,
, frame.bits(0) //data.constData() //msdn: const void*
, frame.bytesPerLine(0));
if (hr != S_OK) {
qWarning("Failed to copy from memory to bitmap (%ld)", hr);
}
update();
return true;
}