This article collects typical usage examples of the C++ BBuffer class. If you are unsure what BBuffer is for, how to use it, or what real-world BBuffer code looks like, the selected examples below may help.
15 code examples of the BBuffer class are shown, sorted by popularity by default.
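Almost all of the examples below follow the same BBuffer lifecycle: a node requests a buffer from a BBufferGroup, fills the payload returned by Data() and the media_header returned by Header(), and then either hands the buffer downstream with SendBuffer() or returns it to its group with Recycle(). The following is a minimal sketch of that cycle for a hypothetical raw-audio producer node, assembled only from calls that appear in the examples; the class name MyProducer, the members fBufferGroup and fOutput, and the helper ProduceOneBuffer() are illustrative, not taken from any example.

// Minimal sketch of the request / fill / send / recycle cycle, assuming a
// node derived from BBufferProducer and BMediaEventLooper with an
// established raw-audio connection described by fOutput (a media_output).
#include <string.h>

#include <Buffer.h>
#include <BufferGroup.h>
#include <MediaDefs.h>
#include <TimeSource.h>

status_t
MyProducer::ProduceOneBuffer(bigtime_t performanceTime)
{
    // Ask the group for a buffer, waiting at most half a buffer duration
    // instead of blocking forever.
    BBuffer* buffer = fBufferGroup->RequestBuffer(
        fOutput.format.u.raw_audio.buffer_size, BufferDuration() / 2);
    if (buffer == NULL)
        return B_ERROR; // request timed out; skip this cycle

    // Fill the payload (silence here) and describe it in the header.
    memset(buffer->Data(), 0, fOutput.format.u.raw_audio.buffer_size);
    media_header* header = buffer->Header();
    header->type = B_MEDIA_RAW_AUDIO;
    header->size_used = fOutput.format.u.raw_audio.buffer_size;
    header->time_source = TimeSource()->ID();
    header->start_time = performanceTime;

    // Hand the buffer downstream. Some examples use the older two-argument
    // SendBuffer(buffer, destination) form instead. If sending fails the
    // buffer is not consumed and must be recycled, or the group runs dry.
    status_t status = SendBuffer(buffer, fOutput.source, fOutput.destination);
    if (status != B_OK)
        buffer->Recycle();
    return status;
}

Most of the examples are variations on this cycle: Examples 1, 4, 5 and 9 request and send buffers, Examples 6, 10 and 13 fill a buffer and its header for a caller to send, and Examples 2, 3, 7 and 11 use the same calls only to time a dry run of a filter before recycling the buffer. Examples 8 and 15 involve an unrelated BBuffer class, a serialization buffer with its own serialize(), setByteOrder() and setCompressInteger() API.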
Example 1: acquire_sem_etc
void
ProducerNode::BufferProducer()
{
    // this thread produces one buffer every two seconds,
    // and schedules it to be handled one second later than produced,
    // assuming a realtime timesource
    status_t rv;
    for (;;) {
        rv = acquire_sem_etc(mBufferProducerSem, 1, B_RELATIVE_TIMEOUT, DELAY);
        if (rv == B_INTERRUPTED) {
            continue;
        } else if (rv == B_OK) {
            // triggered by AdditionalBufferRequested
            release_sem(mBufferProducerSem);
        } else if (rv != B_TIMED_OUT) {
            // triggered by deleting the semaphore (stop request)
            break;
        }
        if (!mOutputEnabled)
            continue;

        BBuffer *buffer;
        // out("ProducerNode: RequestBuffer\n");
        buffer = mBufferGroup->RequestBuffer(2048);
        if (!buffer) {
            // no buffer available right now; skip this cycle
            continue;
        }
        buffer->Header()->start_time = TimeSource()->Now() + DELAY / 2;
        out("ProducerNode: SendBuffer, scheduled time = %5.4f\n",
            buffer->Header()->start_time / 1E6);
        rv = SendBuffer(buffer, mOutput.destination);
        if (rv != B_OK) {
            // the buffer was not accepted downstream; reclaim it
            buffer->Recycle();
        }
    }
}
Example 2: BBufferGroup
bigtime_t
EqualizerNode::GetFilterLatency(void)
{
    if (fOutputMedia.destination == media_destination::null)
        return 0LL;

    // allocate a temporary group, fetch one buffer and describe it
    BBufferGroup* test_group =
        new BBufferGroup(fOutputMedia.format.u.raw_audio.buffer_size, 1);
    BBuffer* buffer =
        test_group->RequestBuffer(fOutputMedia.format.u.raw_audio.buffer_size);
    buffer->Header()->type = B_MEDIA_RAW_AUDIO;
    buffer->Header()->size_used = fOutputMedia.format.u.raw_audio.buffer_size;

    // time a dry run of the filter
    bigtime_t begin = system_time();
    FilterBuffer(buffer);
    bigtime_t latency = system_time() - begin;

    // clean up and reset the filter state
    buffer->Recycle();
    delete test_group;
    InitFilter();

    return latency;
}
Example 3: PRINT
// figure processing latency by doing 'dry runs' of filterBuffer()
bigtime_t StepMotionBlurFilter::calcProcessingLatency() {
    PRINT(("StepMotionBlurFilter::calcProcessingLatency()\n"));

    if (m_output.destination == media_destination::null) {
        PRINT(("\tNot connected.\n"));
        return 0LL;
    }

    // allocate a temporary buffer group
    BBufferGroup* pTestGroup = new BBufferGroup(
        m_output.format.u.raw_video.display.line_width
            * m_output.format.u.raw_video.display.line_count * 4, 1);

    // fetch a buffer
    BBuffer* pBuffer = pTestGroup->RequestBuffer(
        m_output.format.u.raw_video.display.line_width
            * m_output.format.u.raw_video.display.line_count * 4);
    ASSERT(pBuffer);
    pBuffer->Header()->type = B_MEDIA_RAW_VIDEO;
    pBuffer->Header()->size_used = m_output.format.u.raw_video.display.line_width
        * m_output.format.u.raw_video.display.line_count * 4;

    // run the test
    bigtime_t preTest = system_time();
    filterBuffer(pBuffer);
    bigtime_t elapsed = system_time() - preTest;

    // clean up
    pBuffer->Recycle();
    delete pTestGroup;

    // reset filter state
    initFilter();
    return elapsed;
}
Example 4: CALLED
// -------------------------------------------------------- //
// implementation for BMediaEventLooper
// -------------------------------------------------------- //

// protected:
status_t MediaReader::HandleBuffer(
                const media_timed_event *event,
                bigtime_t lateness,
                bool realTimeEvent)
{
    CALLED();

    if (output.destination == media_destination::null)
        return B_MEDIA_NOT_CONNECTED;

    status_t status = B_OK;
    BBuffer* buffer = fBufferGroup->RequestBuffer(
        output.format.u.multistream.max_chunk_size, fBufferPeriod);
    if (buffer != 0) {
        status = FillFileBuffer(buffer);
        if (status != B_OK) {
            PRINT("MediaReader::HandleEvent got an error from FillFileBuffer.\n");
            buffer->Recycle();
        } else {
            if (fOutputEnabled) {
                status = SendBuffer(buffer, output.destination);
                if (status != B_OK) {
                    PRINT("MediaReader::HandleEvent got an error from SendBuffer.\n");
                    buffer->Recycle();
                }
            } else {
                buffer->Recycle();
            }
        }
    }

    bigtime_t nextEventTime = event->event_time + fBufferPeriod;
    media_timed_event nextBufferEvent(nextEventTime,
        BTimedEventQueue::B_HANDLE_BUFFER);
    EventQueue()->AddEvent(nextBufferEvent);
    return status;
}
Example 5: sizeof
void
ClientNode::_DataAvailable(bigtime_t time)
{
    size_t samples = fFormat.u.raw_audio.buffer_size / sizeof(float);
    fFramesSent += samples;

    JackPortList* ports = fOwner->GetOutputPorts();
    for (int i = 0; i < ports->CountItems(); i++) {
        JackPort* port = ports->ItemAt(i);
        if (port != NULL && port->IsConnected()) {
            BBuffer* buffer = FillNextBuffer(time, port);
            if (buffer) {
                if (SendBuffer(buffer,
                        port->MediaOutput()->source,
                        port->MediaOutput()->destination) != B_OK) {
                    printf("ClientNode::_DataAvailable: Buffer sending "
                        "failed\n");
                    buffer->Recycle();
                }
                // note: nFrames is computed here but not used in this excerpt
                size_t nFrames = fFormat.u.raw_audio.buffer_size
                    / ((fFormat.u.raw_audio.format
                            & media_raw_audio_format::B_AUDIO_SIZE_MASK)
                        * fFormat.u.raw_audio.channel_count);
            }
            if (buffer == NULL)
                return;
        }
    }
}
Example 6: CALLED
BBuffer*
SoundPlayNode::FillNextBuffer(bigtime_t eventTime)
{
    CALLED();

    // get a buffer from our buffer group
    BBuffer* buffer = fBufferGroup->RequestBuffer(
        fOutput.format.u.raw_audio.buffer_size, BufferDuration() / 2);

    // If we fail to get a buffer (for example, if the request times out), we
    // skip this buffer and go on to the next, to avoid locking up the control
    // thread
    if (buffer == NULL) {
        ERROR("SoundPlayNode::FillNextBuffer: RequestBuffer failed\n");
        return NULL;
    }

    if (fPlayer->HasData()) {
        fPlayer->PlayBuffer(buffer->Data(),
            fOutput.format.u.raw_audio.buffer_size, fOutput.format.u.raw_audio);
    } else
        memset(buffer->Data(), 0, fOutput.format.u.raw_audio.buffer_size);

    // fill in the buffer header
    media_header* header = buffer->Header();
    header->type = B_MEDIA_RAW_AUDIO;
    header->size_used = fOutput.format.u.raw_audio.buffer_size;
    header->time_source = TimeSource()->ID();
    header->start_time = eventTime;

    return buffer;
}
Example 7: PRINT
// figure processing latency by doing 'dry runs' of filterBuffer()
bigtime_t FlangerNode::calcProcessingLatency() {
    PRINT(("FlangerNode::calcProcessingLatency()\n"));

    if (m_output.destination == media_destination::null) {
        PRINT(("\tNot connected.\n"));
        return 0LL;
    }

    // allocate a temporary buffer group
    BBufferGroup* pTestGroup = new BBufferGroup(
        m_output.format.u.raw_audio.buffer_size, 1);

    // fetch a buffer
    BBuffer* pBuffer = pTestGroup->RequestBuffer(
        m_output.format.u.raw_audio.buffer_size);
    ASSERT(pBuffer);
    pBuffer->Header()->type = B_MEDIA_RAW_AUDIO;
    pBuffer->Header()->size_used = m_output.format.u.raw_audio.buffer_size;

    // run the test
    bigtime_t preTest = system_time();
    filterBuffer(pBuffer);
    bigtime_t elapsed = system_time() - preTest;

    // clean up
    pBuffer->Recycle();
    delete pTestGroup;

    // reset filter state
    initFilter();
    return elapsed;
}
Example 8: read
BINLINE void BMessageHeader::read(BBuffer& bbuf) {
    bbuf.setByteOrder(BBIG_ENDIAN);
    bool cmpr = bbuf.setCompressInteger(false);

    // the magic number also tells us the stream's byte order
    bbuf.serialize(magic);
    switch (magic) {
    case BMAGIC_BINARY_STREAM:
        break;
    case BMAGIC_BINARY_STREAM_LE:
        magic = BMAGIC_BINARY_STREAM;
        bbuf.setByteOrder(BLITTLE_ENDIAN);
        break;
    default:
        throw BException(BExceptionC::CORRUPT,
            L"Stream must start with BYPS or SPYB");
    }

    bbuf.serialize(error);
    bbuf.serialize(flags);
    if (flags & BHEADER_FLAG_BYPS_VERSION) {
        bbuf.serialize(bversion);
    }
    bbuf.serialize(version);
    targetId.serialize(bbuf, bversion);
    bbuf.serialize(messageId);
    bbuf.setCompressInteger(cmpr);

    if (bversion >= BHEADER_BYPS_VERSION_WITH_SESSIONID) {
        sessionId = BTargetId::readSessionId(bbuf);
    }
}
Example 9: BBufferGroup
void
FireWireDVNode::card_reader_thread()
{
    status_t err;
    size_t rbufsize;
    int rcount;

    fCard->GetBufInfo(&rbufsize, &rcount);
    delete fBufferGroupEncVideo;
    fBufferGroupEncVideo = new BBufferGroup(rbufsize, rcount);

    while (!fTerminateThreads) {
        void *data, *end;
        ssize_t sizeUsed = fCard->Read(&data);
        if (sizeUsed < 0) {
            TRACE("FireWireDVNode::%s: %s\n", __FUNCTION__,
                strerror(sizeUsed));
            continue;
        }

        end = (char*)data + sizeUsed;
        while (data < end) {
            BBuffer* buf = fBufferGroupEncVideo->RequestBuffer(rbufsize, 10000);
            if (!buf) {
                TRACE("OutVideo: request buffer timeout\n");
                continue;
            }
            err = fCard->Extract(buf->Data(), &data, &sizeUsed);
            if (err) {
                buf->Recycle();
                printf("OutVideo Extract error %s\n", strerror(err));
                continue;
            }
            media_header* hdr = buf->Header();
            hdr->type = B_MEDIA_ENCODED_VIDEO;
            hdr->size_used = sizeUsed;
            hdr->time_source = TimeSource()->ID(); // set time source id
            // what should the start_time be?
            hdr->start_time = TimeSource()->PerformanceTimeFor(system_time());

            fLock.Lock();
            if (SendBuffer(buf, fOutputEncVideo.source,
                    fOutputEncVideo.destination) != B_OK) {
                TRACE("OutVideo: sending buffer failed\n");
                buf->Recycle();
            }
            fLock.Unlock();
        }
    }
}
Example 10: BufferDuration
BBuffer*
GameProducer::FillNextBuffer(bigtime_t event_time)
{
    // get a buffer from our buffer group
    BBuffer* buf = fBufferGroup->RequestBuffer(fBufferSize, BufferDuration());

    // if we fail to get a buffer (for example, if the request times out), we
    // skip this buffer and go on to the next, to avoid locking up the control
    // thread.
    if (!buf)
        return NULL;

    // we need to describe the buffer
    int64 frames = int64(fBufferSize / fFrameSize);
    memset(buf->Data(), 0, fBufferSize);

    // now fill the buffer with data, continuing where the last buffer left off
    fObject->Play(buf->Data(), frames);

    // fill in the buffer header
    media_header* hdr = buf->Header();
    hdr->type = B_MEDIA_RAW_AUDIO;
    hdr->size_used = fBufferSize;
    hdr->time_source = TimeSource()->ID();

    bigtime_t stamp;
    if (RunMode() == B_RECORDING) {
        // In B_RECORDING mode, we stamp with the capture time. We're not
        // really a hardware capture node, but we simulate it by using the
        // (precalculated) time at which this buffer "should" have been created.
        stamp = event_time;
    } else {
        // okay, we're in one of the "live" performance run modes. in these
        // modes, we stamp the buffer with the time at which the buffer should
        // be rendered to the output, not with the capture time. fStartTime is
        // the cached value of the first buffer's performance time; we calculate
        // this buffer's performance time as an offset from that time, based on
        // the amount of media we've created so far.
        // Recalculating every buffer like this avoids accumulation of error.
        stamp = fStartTime + bigtime_t(double(fFramesSent)
            / double(fOutput.format.u.raw_audio.frame_rate) * 1000000.0);
    }
    hdr->start_time = stamp;

    return buf;
}
Example 11: PRINT
// figure processing latency by doing 'dry runs' of processBuffer()
bigtime_t AudioFilterNode::calcProcessingLatency() {
    PRINT(("AudioFilterNode::calcProcessingLatency()\n"));

    ASSERT(m_input.source != media_source::null);
    ASSERT(m_output.destination != media_destination::null);
    ASSERT(m_op);

    // initialize filter
    m_op->init();

    size_t maxSize = max_c(
        m_input.format.u.raw_audio.buffer_size,
        m_output.format.u.raw_audio.buffer_size);

    // allocate a temporary buffer group
    BBufferGroup* testGroup = new BBufferGroup(maxSize, 1);

    // fetch a buffer big enough for in-place processing
    BBuffer* buffer = testGroup->RequestBuffer(maxSize, -1);
    ASSERT(buffer);
    buffer->Header()->type = B_MEDIA_RAW_AUDIO;
    buffer->Header()->size_used = m_input.format.u.raw_audio.buffer_size;

    // run the test
    bigtime_t preTest = system_time();
    processBuffer(buffer, buffer);
    bigtime_t elapsed = system_time() - preTest;

    // clean up
    buffer->Recycle();
    delete testGroup;

    // reset filter state
    m_op->init();
    return elapsed; // + 100000LL;
}
Example 12: switch
void
AudioConsumer::HandleEvent(const media_timed_event *event,
    bigtime_t late, bool realTimeEvent)
{
    //printf("ClientNode::HandleEvent %d\n", event->type);
    switch (event->type) {
        case BTimedEventQueue::B_HANDLE_BUFFER:
        {
            printf("BTimedEventQueue::B_HANDLE_BUFFER\n");
            if (RunState() == B_STARTED) {
                // log latency
                BBuffer* buffer = (BBuffer*)event->pointer;
                buffer->Recycle();
            }
            break;
        }

        default:
            break;
    }
}
Example 13: TimeSource
BBuffer*
ClientNode::FillNextBuffer(bigtime_t eventTime, JackPort* port)
{
    //printf("FillNextBuffer\n");
    BBuffer* buffer = port->CurrentBuffer();

    media_header* header = buffer->Header();
    header->type = B_MEDIA_RAW_AUDIO;
    header->size_used = fFormat.u.raw_audio.buffer_size;
    header->time_source = TimeSource()->ID();

    bigtime_t start;
    if (RunMode() == B_RECORDING)
        start = eventTime;
    else
        start = fTime + bigtime_t(double(fFramesSent)
            / double(fFormat.u.raw_audio.frame_rate) * 1000000.0);

    header->start_time = start;
    return buffer;
}
Example 14: printf
status_t
ClientNode::_InitOutputPorts()
{
    //printf("JackClient::_InitOutputPorts()\n");
    JackPortList* outputPorts = fOwner->GetOutputPorts();
    for (int i = 0; i < outputPorts->CountItems(); i++) {
        JackPort* port = outputPorts->ItemAt(i);
        if (!port->IsConnected())
            return B_ERROR;

        BBuffer* buffer = fBufferGroup->RequestBuffer(
            fFormat.u.raw_audio.buffer_size);
        if (buffer == NULL || buffer->Data() == NULL) {
            printf("RequestBuffer failed\n");
            return B_ERROR;
        }
        port->SetProcessingBuffer(buffer);
    }
    return B_OK;
}
Example 15: write
BINLINE void BMessageHeader::write(BBuffer& bbuf) {
    bool cmpr = bbuf.setCompressInteger(false);
    bbuf.setByteOrder(byteOrder);

    bbuf.serialize(magic);
    bbuf.serialize(error);
    bbuf.serialize(flags);
    if (flags & BHEADER_FLAG_BYPS_VERSION) {
        bbuf.serialize(bversion);
    }
    bbuf.serialize(version);
    targetId.serialize(bbuf, bversion);
    bbuf.serialize(messageId);
    bbuf.setCompressInteger(cmpr);

    if (bversion >= BHEADER_BYPS_VERSION_WITH_SESSIONID) {
        BTargetId::writeSessionId(bbuf, sessionId);
    }
}