本文整理汇总了C++中UsageEnvironment::reclaim方法的典型用法代码示例。如果您正苦于以下问题:C++ UsageEnvironment::reclaim方法的具体用法?C++ UsageEnvironment::reclaim怎么用?C++ UsageEnvironment::reclaim使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类UsageEnvironment
的用法示例。
在下文中一共展示了UsageEnvironment::reclaim方法的8个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: sendBeepSound
int sendBeepSound(const char* rtspURL, const char* username, const char* password) {
FILE* fp = fopen(WAVE_FILE, "r");
if ( fp == NULL )
{
LOG("wave file not exists : %s", WAVE_FILE);
return -1;
}
else
{
fclose(fp);
}
// Begin by setting up our usage environment:
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
// Begin by creating a "RTSPClient" object. Note that there is a separate "RTSPClient" object for each stream that we wish
// to receive (even if more than stream uses the same "rtsp://" URL).
ourRTSPClient* rtspClient = ourRTSPClient::createNew(*env, rtspURL, RTSP_CLIENT_VERBOSITY_LEVEL, "SCBT BackChannel");
if (rtspClient == NULL) {
*env << "Failed to create a RTSP client for URL \"" << rtspURL << "\": " << env->getResultMsg() << "\n";
env->reclaim(); env = NULL;
delete scheduler; scheduler = NULL;
return -2;
}
rtspClient->bRequireBackChannel = bEnableBackChannel;
// Next, send a RTSP "DESCRIBE" command, to get a SDP description for the stream.
// Note that this command - like all RTSP commands - is sent asynchronously; we do not block, waiting for a response.
// Instead, the following function call returns immediately, and we handle the RTSP response later, from within the event loop:
Authenticator auth;
auth.setUsernameAndPassword(username, password);
rtspClient->sendDescribeCommand(continueAfterDESCRIBE, &auth);
//continueAfterSETUP(rtspClient, 0, new char[2]);
//startPlay(rtspClient);
// All subsequent activity takes place within the event loop:
env->taskScheduler().doEventLoop(&(rtspClient->scs.eventLoopWatchVariable));
// This function call does not return, unless, at some point in time, "eventLoopWatchVariable" gets set to something non-zero.
// If you choose to continue the application past this point (i.e., if you comment out the "return 0;" statement above),
// and if you don't intend to do anything more with the "TaskScheduler" and "UsageEnvironment" objects,
// then you can also reclaim the (small) memory used by these objects by uncommenting the following code:
env->reclaim(); env = NULL;
delete scheduler; scheduler = NULL;
return 0;
}
示例2: teardownRTSPorSIPSession
// Tears down all RTP-related state attached to the demuxer and releases the
// live555 environment and scheduler that backed it.
extern "C" void demux_close_rtp(demuxer_t* demuxer) {
    // The RTP state was stored in the demuxer's 'priv' field.
    RTPState* rtpState = (RTPState*)(demuxer->priv);
    if (rtpState == NULL) return;

    teardownRTSPorSIPSession(rtpState);

    // The environment/scheduler can only be recovered through the media
    // session; if no session was ever created they remain NULL.
    UsageEnvironment* env = NULL;
    TaskScheduler* scheduler = NULL;
    if (rtpState->mediaSession != NULL) {
        env = &(rtpState->mediaSession->envir());
        scheduler = &(env->taskScheduler());
    }

    Medium::close(rtpState->mediaSession);
    Medium::close(rtpState->rtspClient);
    Medium::close(rtpState->sipClient);
    delete rtpState->audioBufferQueue;
    delete rtpState->videoBufferQueue;
    delete[] rtpState->sdpDescription;
    delete rtpState;
#ifdef CONFIG_LIBAVCODEC
    av_freep(&avcctx);
#endif

    // BUG FIX: 'env' stays NULL when 'mediaSession' was never created, and
    // calling reclaim() through a NULL pointer is undefined behavior.
    // ('delete' on a NULL scheduler is already well-defined.)
    if (env != NULL) env->reclaim();
    delete scheduler;
}
示例3: rtsp_fun
void CRTSPSession::rtsp_fun()
{
//::startRTSP(m_progName.c_str(), m_rtspUrl.c_str(), m_ndebugLever);
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
if (openURL(*env, m_progName.c_str(), m_rtspUrl.c_str(), m_debugLevel) == 0)
{
m_nStatus = 1;
env->taskScheduler().doEventLoop(&eventLoopWatchVariable);
m_running = false;
eventLoopWatchVariable = 0;
if (m_rtspClient)
{
shutdownStream(m_rtspClient,0);
}
m_rtspClient = NULL;
}
env->reclaim();
env = NULL;
delete scheduler;
scheduler = NULL;
m_nStatus = 2;
}
示例4: main
int main(int argc, char** argv) {
// Begin by setting up our usage environment:
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
// To implement client access control to the RTSP server, do the following:
authDB = new UserAuthenticationDatabase;
authDB->addUserRecord("username1", "password1"); // replace these with real strings
// Repeat the above with each <username>, <password> that you wish to allow
// access to the server.
#endif
// Create the RTSP server:
RTSPServer* rtspServer = RTSPServer::createNew(*env, 554, authDB);
if (rtspServer == NULL) {
*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
exit(1);
}
// Add live stream
WW_H264VideoSource * videoSource = 0;
ServerMediaSession * sms = ServerMediaSession::createNew(*env, "live", 0, "ww live test");
sms->addSubsession(WW_H264VideoServerMediaSubsession::createNew(*env, videoSource));
rtspServer->addServerMediaSession(sms);
char * url = rtspServer->rtspURL(sms);
*env << "using url \"" << url << "\"\n";
delete[] url;
// Run loop
env->taskScheduler().doEventLoop();
rtspServer->removeServerMediaSession(sms);
Medium::close(rtspServer);
env->reclaim();
delete scheduler;
return 1;
}
示例5: StreamShutdown
//**************************************************************************************
void StreamShutdown()
{
if (m_rtspServer != NULL)
{
LogDebug("Stream server:Shutting down RTSP server");
MPRTSPServer *server = m_rtspServer;
m_rtspServer = NULL;
Medium::close(server);
}
if (m_env != NULL)
{
LogDebug("Stream server:Cleaning up environment");
UsageEnvironment *env = m_env;
m_env = NULL;
TaskScheduler *scheduler = &env->taskScheduler();
env->reclaim();
delete scheduler;
}
}
示例6: main
//.........这里部分代码省略.........
case 'm': multicast = true; break;
case 'W': width = atoi(optarg); break;
case 'H': height = atoi(optarg); break;
case 'Q': queueSize = atoi(optarg); break;
case 'P': rtspPort = atoi(optarg); break;
case 'T': rtspOverHTTPPort = atoi(optarg); break;
case 'F': fps = atoi(optarg); break;
case 'M': useMmap = true; break;
case 'h':
{
std::cout << argv[0] << " [-v[v]][-m] [-P RTSP port][-P RTSP/HTTP port][-Q queueSize] [-M] [-W width] [-H height] [-F fps] [-O file] [device]" << std::endl;
std::cout << "\t -v : verbose " << std::endl;
std::cout << "\t -v v : very verbose " << std::endl;
std::cout << "\t -Q length: Number of frame queue (default "<< queueSize << ")" << std::endl;
std::cout << "\t -O file : Dump capture to a file" << std::endl;
std::cout << "\t RTSP options :" << std::endl;
std::cout << "\t -m : Enable multicast output" << std::endl;
std::cout << "\t -P port : RTSP port (default "<< rtspPort << ")" << std::endl;
std::cout << "\t -H port : RTSP over HTTP port (default "<< rtspOverHTTPPort << ")" << std::endl;
std::cout << "\t V4L2 options :" << std::endl;
std::cout << "\t -M : V4L2 capture using memory mapped buffers (default use read interface)" << std::endl;
std::cout << "\t -F fps : V4L2 capture framerate (default "<< fps << ")" << std::endl;
std::cout << "\t -W width : V4L2 capture width (default "<< width << ")" << std::endl;
std::cout << "\t -H height: V4L2 capture height (default "<< height << ")" << std::endl;
std::cout << "\t device : V4L2 capture device (default "<< dev_name << ")" << std::endl;
exit(0);
}
}
}
if (optind<argc)
{
dev_name = argv[optind];
}
// create live555 environment
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
// create RTSP server
RTSPServer* rtspServer = RTSPServer::createNew(*env, rtspPort);
if (rtspServer == NULL)
{
*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
}
else
{
// set http tunneling
if (rtspOverHTTPPort)
{
rtspServer->setUpTunnelingOverHTTP(rtspOverHTTPPort);
}
// Init capture
*env << "Create V4L2 Source..." << dev_name << "\n";
V4L2DeviceParameters param(dev_name,format,width,height,fps,verbose);
V4L2Device* videoCapture = NULL;
if (useMmap)
{
videoCapture = V4L2MMAPDeviceSource::createNew(param);
}
else
{
videoCapture = V4L2READDeviceSource::createNew(param);
}
V4L2DeviceSource* videoES = V4L2DeviceSource::createNew(*env, param, videoCapture, outputFile, queueSize, verbose);
if (videoES == NULL)
{
*env << "Unable to create source for device " << dev_name << "\n";
}
else
{
destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
OutPacketBuffer::maxSize = videoCapture->getBufferSize();
StreamReplicator* replicator = StreamReplicator::createNew(*env, videoES, false);
// Create Server Multicast Session
if (multicast)
{
addSession(rtspServer, "multicast", MulticastServerMediaSubsession::createNew(*env,destinationAddress, Port(rtpPortNum), Port(rtcpPortNum), ttl, 96, replicator,format));
}
// Create Server Unicast Session
addSession(rtspServer, "unicast", UnicastServerMediaSubsession::createNew(*env,replicator,format));
// main loop
signal(SIGINT,sighandler);
env->taskScheduler().doEventLoop(&quit);
*env << "Exiting..\n";
}
Medium::close(videoES);
delete videoCapture;
Medium::close(rtspServer);
}
env->reclaim();
delete scheduler;
return 0;
}
示例7: main
//.........这里部分代码省略.........
std::list<std::string>::iterator devIt;
for ( devIt=devList.begin() ; devIt!=devList.end() ; ++devIt)
{
std::string deviceName(*devIt);
// Init capture
LOG(NOTICE) << "Create V4L2 Source..." << deviceName;
V4L2DeviceParameters param(deviceName.c_str(),format,width,height,fps, verbose);
V4l2Capture* videoCapture = V4l2DeviceFactory::CreateVideoCapure(param, useMmap);
if (videoCapture)
{
nbSource++;
format = videoCapture->getFormat();
int outfd = -1;
V4l2Output* out = NULL;
if (!outputFile.empty())
{
V4L2DeviceParameters outparam(outputFile.c_str(), videoCapture->getFormat(), videoCapture->getWidth(), videoCapture->getHeight(), 0,verbose);
V4l2Output* out = V4l2DeviceFactory::CreateVideoOutput(outparam, useMmap);
if (out != NULL)
{
outfd = out->getFd();
}
}
LOG(NOTICE) << "Start V4L2 Capture..." << deviceName;
if (!videoCapture->captureStart())
{
LOG(NOTICE) << "Cannot start V4L2 Capture for:" << deviceName;
}
V4L2DeviceSource* videoES = NULL;
if (format == V4L2_PIX_FMT_H264)
{
videoES = H264_V4L2DeviceSource::createNew(*env, param, videoCapture, outfd, queueSize, useThread, repeatConfig);
}
else
{
videoES = V4L2DeviceSource::createNew(*env, param, videoCapture, outfd, queueSize, useThread);
}
if (videoES == NULL)
{
LOG(FATAL) << "Unable to create source for device " << deviceName;
delete videoCapture;
}
else
{
// extend buffer size if needed
if (videoCapture->getBufferSize() > OutPacketBuffer::maxSize)
{
OutPacketBuffer::maxSize = videoCapture->getBufferSize();
}
StreamReplicator* replicator = StreamReplicator::createNew(*env, videoES, false);
std::string baseUrl;
if (devList.size() > 1)
{
baseUrl = basename(deviceName.c_str());
baseUrl.append("/");
}
// Create Multicast Session
if (multicast)
{
LOG(NOTICE) << "RTP address " << inet_ntoa(destinationAddress) << ":" << rtpPortNum;
LOG(NOTICE) << "RTCP address " << inet_ntoa(destinationAddress) << ":" << rtcpPortNum;
addSession(rtspServer, baseUrl+murl, MulticastServerMediaSubsession::createNew(*env,destinationAddress, Port(rtpPortNum), Port(rtcpPortNum), ttl, replicator,format));
// increment ports for next sessions
rtpPortNum+=2;
rtcpPortNum+=2;
}
// Create Unicast Session
addSession(rtspServer, baseUrl+url, UnicastServerMediaSubsession::createNew(*env,replicator,format));
}
if (out)
{
delete out;
}
}
}
if (nbSource>0)
{
// main loop
signal(SIGINT,sighandler);
env->taskScheduler().doEventLoop(&quit);
LOG(NOTICE) << "Exiting....";
}
Medium::close(rtspServer);
}
env->reclaim();
delete scheduler;
return 0;
}
示例8: main
//.........这里部分代码省略.........
if( audioType == AUDIO_G711)
{
sinkAudio = SimpleRTPSink::createNew(*env, rtpGroupsockAudio, 96, audioSamplingFrequency, "audio", "PCMU", 1);
}
else
{
char const* encoderConfigStr = "1408";// (2<<3)|(8>>1) = 0x14 ; ((8<<7)&0xFF)|(1<<3)=0x08 ;
sinkAudio = MPEG4GenericRTPSink::createNew(*env, rtpGroupsockAudio,
96,
audioSamplingFrequency,
"audio", "AAC-hbr",
encoderConfigStr, audioNumChannels);
}
}
else{
if(audioType == AUDIO_G711)
{
sinkAudio = SimpleRTPSink::createNew(*env, rtpGroupsockAudio, 0, audioSamplingFrequency, "audio", "PCMU", 1);
}
else{
char const* encoderConfigStr = "1588";// (2<<3)|(11>>1) = 0x15 ; ((11<<7)&0xFF)|(1<<3)=0x88 ;
sinkAudio = MPEG4GenericRTPSink::createNew(*env, rtpGroupsockAudio,
96,
audioSamplingFrequency,
"audio", "AAC-hbr",
encoderConfigStr, audioNumChannels);
}
}
// Create (and start) a 'RTCP instance' for this RTP sink:
unsigned totalSessionBandwidthAudio = (audioOutputBitrate+500)/1000; // in kbps; for RTCP b/w share
rtcpAudio = RTCPInstance::createNew(*env, rtcpGroupsockAudio,
totalSessionBandwidthAudio, CNAME,
sinkAudio, NULL /* we're a server */,
streamingMode == STREAMING_MULTICAST_SSM);
// Note: This starts RTCP running automatically
sms->addSubsession(PassiveServerMediaSubsession::createNew(*sinkAudio, rtcpAudio));
// Start streaming:
sinkAudio->startPlaying(*sourceAudio, NULL, NULL);
}
rtspServer->addServerMediaSession(sms);
{
struct in_addr dest; dest.s_addr = multicastAddress;
char *url = rtspServer->rtspURL(sms);
//char *url2 = inet_ntoa(dest);
*env << "Mulicast Play this stream using the URL:\n\t" << url << "\n";
//*env << "2 Mulicast addr:\n\t" << url2 << "\n";
delete[] url;
}
}
// Begin the LIVE555 event loop:
env->taskScheduler().doEventLoop(&watchVariable); // does not return
if( streamingMode!= STREAMING_UNICAST )
{
Medium::close(rtcpAudio);
Medium::close(sinkAudio);
Medium::close(sourceAudio);
delete rtpGroupsockAudio;
delete rtcpGroupsockAudio;
Medium::close(rtcpVideo);
Medium::close(sinkVideo);
Medium::close(sourceVideo);
delete rtpGroupsockVideo;
delete rtcpGroupsockVideo;
}
Medium::close(rtspServer); // will also reclaim "sms" and its "ServerMediaSubsession"s
if( MjpegInputDevice != NULL )
{
Medium::close(MjpegInputDevice);
}
if( H264InputDevice != NULL )
{
Medium::close(H264InputDevice);
}
if( Mpeg4InputDevice != NULL )
{
Medium::close(Mpeg4InputDevice);
}
env->reclaim();
delete scheduler;
ApproInterfaceExit();
return 0; // only to prevent compiler warning
}