This article collects typical usage examples of the C++ MediaSubsession class (part of the LIVE555 liveMedia library). If you are wondering what MediaSubsession is for, or how it is used in practice, the selected examples below should help.
Fifteen code examples are shown below, ordered by popularity by default.
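All fifteen examples share the same basic pattern: construct a MediaSubsessionIterator over a MediaSession (or over an RTSP client's session) and walk its subsessions with next(), inspecting each subsession's medium, codec, source, or sink. As a point of reference before the examples, here is a minimal sketch of that pattern using only standard liveMedia calls; the function name listAndInitiateSubsessions and the assumption that the SDP description was obtained elsewhere (e.g. from an RTSP DESCRIBE response) are illustrative, not taken from the examples that follow.

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"

// Walk every subsession of a MediaSession, try to initiate a data source
// for it, and report its medium/codec and client port.
void listAndInitiateSubsessions(UsageEnvironment& env, char const* sdpDescription) {
  MediaSession* session = MediaSession::createNew(env, sdpDescription);
  if (session == NULL) {
    env << "Failed to create MediaSession: " << env.getResultMsg() << "\n";
    return;
  }
  MediaSubsessionIterator iter(*session);
  MediaSubsession* subsession;
  while ((subsession = iter.next()) != NULL) {
    if (!subsession->initiate()) {
      env << "Failed to initiate \"" << subsession->mediumName()
          << "/" << subsession->codecName()
          << "\" subsession: " << env.getResultMsg() << "\n";
      continue;
    }
    env << "Initiated \"" << subsession->mediumName()
        << "/" << subsession->codecName()
        << "\" subsession (client port " << subsession->clientPortNum() << ")\n";
  }
  Medium::close(session); // also reclaims the subsessions created from the SDP
}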
Example 1: iter
Boolean AVIFileSink::continuePlaying() {
// Run through each of our input session's 'subsessions',
// asking for a frame from each one:
Boolean haveActiveSubsessions = False;
MediaSubsessionIterator iter(fInputSession);
MediaSubsession* subsession;
while ((subsession = iter.next()) != NULL) {
FramedSource* subsessionSource = subsession->readSource();
if (subsessionSource == NULL) continue;
if (subsessionSource->isCurrentlyAwaitingData()) continue;
AVISubsessionIOState* ioState
= (AVISubsessionIOState*)(subsession->miscPtr);
if (ioState == NULL) continue;
haveActiveSubsessions = True;
unsigned char* toPtr = ioState->fBuffer->dataEnd();
unsigned toSize = ioState->fBuffer->bytesAvailable();
subsessionSource->getNextFrame(toPtr, toSize,
afterGettingFrame, ioState,
onSourceClosure, ioState);
}
if (!haveActiveSubsessions) {
envir().setResultMsg("No subsessions are currently active");
return False;
}
return True;
}
Example 2: checkForPacketArrival
void checkForPacketArrival(void* /*clientData*/) {
if (!notifyOnPacketArrival) return; // we're not checking
// Check each subsession, to see whether it has received data packets:
unsigned numSubsessionsChecked = 0;
unsigned numSubsessionsWithReceivedData = 0;
unsigned numSubsessionsThatHaveBeenSynced = 0;
MediaSubsessionIterator iter(*session);
MediaSubsession* subsession;
while ((subsession = iter.next()) != NULL)
{
RTPSource* src = subsession->rtpSource();
if (src == NULL) continue;
++numSubsessionsChecked;
if (src->receptionStatsDB().numActiveSourcesSinceLastReset() > 0)
{
// At least one data packet has arrived
++numSubsessionsWithReceivedData;
}
if (src->hasBeenSynchronizedUsingRTCP())
++numSubsessionsThatHaveBeenSynced;
}
unsigned numSubsessionsToCheck = numSubsessionsChecked;
// Special case for "QuickTimeFileSink"s and "AVIFileSink"s:
// They might not use all of the input sources:
if (qtOut != NULL)
numSubsessionsToCheck = qtOut->numActiveSubsessions();
else if (aviOut != NULL)
numSubsessionsToCheck = aviOut->numActiveSubsessions();
Boolean notifyTheUser;
if (!syncStreams)
notifyTheUser = numSubsessionsWithReceivedData > 0; // easy case
else
{
notifyTheUser = numSubsessionsWithReceivedData >= numSubsessionsToCheck
&& numSubsessionsThatHaveBeenSynced == numSubsessionsChecked;
// Note: A subsession with no active sources is considered to be synced
}
if (notifyTheUser)
{
struct timeval timeNow;
gettimeofday(&timeNow, NULL);
char timestampStr[100];
sprintf(timestampStr, "%ld.%03ld", timeNow.tv_sec, (long)(timeNow.tv_usec/1000));
*env << (syncStreams ? "Synchronized d" : "D")
<< "ata packets have begun arriving [" << timestampStr << "]\007\n";
return;
}
// No luck, so reschedule this check again, after a delay:
int uSecsToDelay = 100000; // 100 ms
arrivalCheckTimerTask
= env->taskScheduler().scheduleDelayedTask(uSecsToDelay,
(TaskFunc*)checkForPacketArrival, NULL);
}
Example 3:
void Live555ClientEngine::onStop()
{
if (session != nullptr)
{
MediaSubsessionIterator iterator(*session);
MediaSubsession* subSession = nullptr;
bool someSubsessionsWereActive = false;
while ((subSession = iterator.next()) != nullptr)
{
if (subSession->sink != nullptr)
{
Medium::close(subSession->sink);
subSession->sink = nullptr;
if (subSession->rtcpInstance() != nullptr)
{
// in case the server sends a RTCP "BYE" while handling "TEARDOWN"
subSession->rtcpInstance()->setByeHandler(nullptr, nullptr);
}
someSubsessionsWereActive = true;
}
}
if (someSubsessionsWereActive)
{
rtspClient->sendTeardownCommand(*session, nullptr);
}
}
Medium::close(rtspClient);
Medium::close(session);
}
Example 4: iter
Boolean MediaSession
::initiateByMediaType(char const* mimeType,
MediaSubsession*& resultSubsession,
int useSpecialRTPoffset) {
// Look through this session's subsessions for media that match "mimeType"
resultSubsession = NULL;
MediaSubsessionIterator iter(*this);
MediaSubsession* subsession;
while ((subsession = iter.next()) != NULL) {
Boolean wasAlreadyInitiated = subsession->readSource() != NULL;
if (!wasAlreadyInitiated) {
// Try to create a source for this subsession:
if (!subsession->initiate(useSpecialRTPoffset)) return False;
}
// Make sure the source's MIME type is one that we handle:
if (strcmp(subsession->readSource()->MIMEtype(), mimeType) != 0) {
if (!wasAlreadyInitiated) subsession->deInitiate();
continue;
}
resultSubsession = subsession;
break; // use this
}
if (resultSubsession == NULL) {
envir().setResultMsg("Session has no usable media subsession");
return False;
}
return True;
}
Example 5: beginQOSMeasurement
void beginQOSMeasurement() {
// Set up a measurement record for each active subsession:
struct timeval startTime;
gettimeofday(&startTime, NULL);
nextQOSMeasurementUSecs = startTime.tv_sec*1000000 + startTime.tv_usec;
qosMeasurementRecord* qosRecordTail = NULL;
MediaSubsessionIterator iter(*session);
MediaSubsession* subsession;
while ((subsession = iter.next()) != NULL) {
RTPSource* src = subsession->rtpSource();
#ifdef SUPPORT_REAL_RTSP
if (session->isRealNetworksRDT) src = (RTPSource*)(subsession->readSource()); // hack
#endif
if (src == NULL) continue;
qosMeasurementRecord* qosRecord
= new qosMeasurementRecord(startTime, src);
if (qosRecordHead == NULL) qosRecordHead = qosRecord;
if (qosRecordTail != NULL) qosRecordTail->fNext = qosRecord;
qosRecordTail = qosRecord;
}
// Then schedule the first of the periodic measurements:
scheduleNextQOSMeasurement();
}
Example 6: iter
int CAimer39RTSPClient::GetStreamType( unsigned int nStreamNum, STREAM_TYPE & Type )
{
StreamClientState& scs = m_pRTSPClient->scs; // alias
MediaSubsessionIterator iter(*scs.session);
MediaSubsession* subsession = NULL;
unsigned int iStreamCnt = 0;
iter.reset();
while ( ( subsession = iter.next() ) != NULL ) {
if ( strcmp( subsession->mediumName(), "video" ) == 0 ||
strcmp( subsession->mediumName(), "VIDEO" ) == 0 ) {
Type = STREAM_VIDEO;
} else if ( strcmp( subsession->mediumName(), "audio" ) == 0 ||
strcmp( subsession->mediumName(), "AUDIO" ) == 0 ) {
Type = STREAM_AUDIO;
} else {
Type = STREAM_UNKNOWN;
}
if (nStreamNum == iStreamCnt) break;
++iStreamCnt;
}
iter.reset();
// If the loop ran out of subsessions, the requested stream number was out of range:
return (subsession != NULL) ? 0 : -1;
}
Example 7: completeOutputFile
AVIFileSink::~AVIFileSink() {
completeOutputFile();
// Then, stop streaming and delete each active "AVISubsessionIOState":
MediaSubsessionIterator iter(fInputSession);
MediaSubsession* subsession;
while ((subsession = iter.next()) != NULL) {
if (subsession->readSource() != NULL) subsession->readSource()->stopGettingFrames();
AVISubsessionIOState* ioState
= (AVISubsessionIOState*)(subsession->miscPtr);
if (ioState == NULL) continue;
delete ioState;
}
// Then, delete the index records:
AVIIndexRecord* cur = fIndexRecordsHead;
while (cur != NULL) {
AVIIndexRecord* next = cur->next();
delete cur;
cur = next;
}
// Finally, close our output file:
CloseOutputFile(fOutFid);
}
Example 8: checkInterPacketGaps
void checkInterPacketGaps(void* /*clientData*/) {
if (interPacketGapMaxTime == 0) return; // we're not checking
// Check each subsession, counting up how many packets have been received:
unsigned newTotNumPacketsReceived = 0;
MediaSubsessionIterator iter(*session);
MediaSubsession* subsession;
while ((subsession = iter.next()) != NULL) {
RTPSource* src = subsession->rtpSource();
if (src == NULL) continue;
newTotNumPacketsReceived += src->receptionStatsDB().totNumPacketsReceived();
}
if (newTotNumPacketsReceived == totNumPacketsReceived) {
// No additional packets have been received since the last time we
// checked, so end this stream:
*env << "Closing session, because we stopped receiving packets.\n";
interPacketGapCheckTimerTask = NULL;
sessionAfterPlaying();
} else {
totNumPacketsReceived = newTotNumPacketsReceived;
// Check again, after the specified delay:
interPacketGapCheckTimerTask
= env->taskScheduler().scheduleDelayedTask(interPacketGapMaxTime*1000000,
(TaskFunc*)checkInterPacketGaps, NULL);
}
}
Example 9: shutdownStream
void shutdownStream(RTSPClient* rtspClient, int exitCode)
{
ourRTSPClient* rtsp = (ourRTSPClient*)rtspClient;
if (exitCode != 0)
{
//eventLoopWatchVariable = 1;
//int nID = rtsp->m_nID;
IPNC_CloseStream(rtsp->m_nID);
return;
}
UsageEnvironment& env = rtspClient->envir(); // alias
StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias
// First, check whether any subsessions have still to be closed:
if (scs.session != NULL) {
Boolean someSubsessionsWereActive = False;
MediaSubsessionIterator iter(*scs.session);
MediaSubsession* subsession;
while ((subsession = iter.next()) != NULL) {
if (subsession->sink != NULL) {
Medium::close(subsession->sink);
subsession->sink = NULL;
if (subsession->rtcpInstance() != NULL) {
subsession->rtcpInstance()->setByeHandler(NULL, NULL); // in case the server sends a RTCP "BYE" while handling "TEARDOWN"
}
someSubsessionsWereActive = True;
}
}
if (someSubsessionsWereActive) {
// Send a RTSP "TEARDOWN" command, to tell the server to shutdown the stream.
// Don't bother handling the response to the "TEARDOWN".
rtspClient->sendTeardownCommand(*scs.session, NULL);
}
}
env << *rtspClient << "Closing the stream.\n";
Medium::close(rtspClient);
rtsp = NULL;
rtspClient = NULL;
// Note that this will also cause this stream's "StreamClientState" structure to get reclaimed.
//if (--rtspClientCount == 0)
{
// The final stream has ended, so exit the application now.
// (Of course, if you're embedding this code into your own application, you might want to comment this out,
// and replace it with "eventLoopWatchVariable = 1;", so that we leave the LIVE555 event loop, and continue running "main()".)
//exit(exitCode);
//eventLoopWatchVariable = 1;
//CXAgent::Instance().do_exit();
}
}
Example 10: OnDemandServerMediaSubsession
ProxyServerMediaSubsession
::ProxyServerMediaSubsession(MediaSubsession& mediaSubsession,
portNumBits initialPortNum, Boolean multiplexRTCPWithRTP)
: OnDemandServerMediaSubsession(mediaSubsession.parentSession().envir(), True/*reuseFirstSource*/,
initialPortNum, multiplexRTCPWithRTP),
fClientMediaSubsession(mediaSubsession), fCodecName(strDup(mediaSubsession.codecName())),
fNext(NULL), fHaveSetupStream(False) {
}
Example 11: findSubSessionByStreamNum
int CAimer39RTSPClient::GetVideoFPS( unsigned int nStreamNum )
{
MediaSubsession * subsession = findSubSessionByStreamNum( nStreamNum );
if ( NULL == subsession ) return -1;
IS_VIDEO_SUBS_R( subsession, -1 );
return (int)subsession->videoFPS();
}
Example 12: iter
char* MediaSession::absEndTime() const {
if (fAbsEndTime != NULL) return fAbsEndTime;
// If a subsession has an 'absolute' end time, then use that:
MediaSubsessionIterator iter(*this);
MediaSubsession* subsession;
while ((subsession = iter.next()) != NULL) {
if (subsession->_absEndTime() != NULL) return subsession->_absEndTime();
}
return NULL;
}
Example 13: subsessionByeHandler
void subsessionByeHandler(void* clientData) {
struct timeval timeNow;
gettimeofday(&timeNow, NULL);
unsigned secsDiff = timeNow.tv_sec - startTime.tv_sec;
MediaSubsession* subsession = (MediaSubsession*)clientData;
*env << "Received RTCP \"BYE\" on \"" << subsession->mediumName()
<< "/" << subsession->codecName()
<< "\" subsession (after " << secsDiff
<< " seconds)\n";
// Act now as if the subsession had closed:
subsessionAfterPlaying(subsession);
}
Example 14: Medium
AVIFileSink::AVIFileSink(UsageEnvironment& env,
MediaSession& inputSession,
char const* outputFileName,
unsigned bufferSize,
unsigned short movieWidth, unsigned short movieHeight,
unsigned movieFPS, Boolean packetLossCompensate)
: Medium(env), fInputSession(inputSession),
fIndexRecordsHead(NULL), fIndexRecordsTail(NULL), fNumIndexRecords(0),
fBufferSize(bufferSize), fPacketLossCompensate(packetLossCompensate),
fAreCurrentlyBeingPlayed(False), fNumSubsessions(0), fNumBytesWritten(0),
fHaveCompletedOutputFile(False),
fMovieWidth(movieWidth), fMovieHeight(movieHeight), fMovieFPS(movieFPS) {
fOutFid = OpenOutputFile(env, outputFileName);
if (fOutFid == NULL) return;
// Set up I/O state for each input subsession:
MediaSubsessionIterator iter(fInputSession);
MediaSubsession* subsession;
while ((subsession = iter.next()) != NULL) {
// Ignore subsessions without a data source:
FramedSource* subsessionSource = subsession->readSource();
if (subsessionSource == NULL) continue;
// If "subsession's" SDP description specified screen dimension
// or frame rate parameters, then use these.
if (subsession->videoWidth() != 0) {
fMovieWidth = subsession->videoWidth();
}
if (subsession->videoHeight() != 0) {
fMovieHeight = subsession->videoHeight();
}
if (subsession->videoFPS() != 0) {
fMovieFPS = subsession->videoFPS();
}
AVISubsessionIOState* ioState
= new AVISubsessionIOState(*this, *subsession);
subsession->miscPtr = (void*)ioState;
// Also set a 'BYE' handler for this subsession's RTCP instance:
if (subsession->rtcpInstance() != NULL) {
subsession->rtcpInstance()->setByeHandler(onRTCPBye, ioState);
}
++fNumSubsessions;
}
// Begin by writing an AVI header:
addFileHeader_AVI();
}
Example 15: setupStreams
BOOL setupStreams( unsigned *pResponseCode /*= NULL*/ )
{
MediaSubsessionIterator iter(*session);
MediaSubsession *subsession;
Boolean madeProgress = False;
BOOL bResult = TRUE;
while ((subsession = iter.next()) != NULL)
{
if (subsession->clientPortNum() == 0) continue; // port # was not set
if ( !clientSetupSubsession(ourClient, subsession, streamUsingTCP, pResponseCode ) )
{
*env << "Failed to setup \"" << subsession->mediumName()
<< "/" << subsession->codecName()
<< "\" subsession: " << env->getResultMsg() << "\n";
bResult = FALSE;
}
else
{
*env << "Setup \"" << subsession->mediumName()
<< "/" << subsession->codecName()
<< "\" subsession (client ports " << subsession->clientPortNum()
<< "-" << subsession->clientPortNum()+1 << ")\n";
madeProgress = True;
bResult = TRUE;
}
}
//if (!madeProgress)
// return bResult;
return bResult;
}