本文整理汇总了C++中UsageEnvironment::getResultMsg方法的典型用法代码示例。如果您正苦于以下问题:C++ UsageEnvironment::getResultMsg方法的具体用法?C++ UsageEnvironment::getResultMsg怎么用?C++ UsageEnvironment::getResultMsg使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类 UsageEnvironment 的用法示例。
在下文中一共展示了UsageEnvironment::getResultMsg方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: OutputSocket
// Constructor for a source-independent multicast group.
// Registers the group address/port as a send destination, joins the
// multicast group on our socket, and verifies that our own source
// address can be determined.
Groupsock::Groupsock(UsageEnvironment& env, struct in_addr const& groupAddr,
Port port, u_int8_t ttl)
: OutputSocket(env, port),
deleteIfNoMembers(False), isSlave(False),
fIncomingGroupEId(groupAddr, port.num(), ttl), fDests(NULL), fTTL(ttl) {
addDestination(groupAddr, port);
// Join the multicast group; a failure is reported at debug level >= 1.
// (socketJoinGroup() is always attempted regardless of DebugLevel.)
if (!socketJoinGroup(env, socketNum(), groupAddr.s_addr) && DebugLevel >= 1) {
env << *this << ": failed to join group: "
<< env.getResultMsg() << "\n";
}
// Make sure we can get our source address (fatal if we cannot):
if (ourIPAddress(env) == 0 && DebugLevel >= 0) {
env << "Unable to determine our source address: "
<< env.getResultMsg() << "\n";
}
if (DebugLevel >= 2) env << *this << ": created\n";
}
示例2: openURL
// Create an RTSP client for "rtspURL" and start the asynchronous DESCRIBE
// exchange; the response is handled later by "continueAfterDESCRIBE".
// Returns 0 on success, -1 if the client could not be created.
int CRTSPSession::openURL(UsageEnvironment& env, char const* progName, char const* rtspURL, int debugLevel)
{
m_rtspClient = ourRTSPClient::createNew(env, rtspURL, debugLevel, progName);
if (m_rtspClient == NULL) {
env << "Failed to create a RTSP client for URL \"" << rtspURL << "\": " << env.getResultMsg() << "\n";
return -1;
}
// Tag the new client with this session's id, then request the SDP description:
ourRTSPClient* client = (ourRTSPClient*)m_rtspClient;
client->m_nID = m_nID;
client->sendDescribeCommand(continueAfterDESCRIBE);
return 0;
}
示例3: sendBeepSound
int sendBeepSound(const char* rtspURL, const char* username, const char* password) {
FILE* fp = fopen(WAVE_FILE, "r");
if ( fp == NULL )
{
LOG("wave file not exists : %s", WAVE_FILE);
return -1;
}
else
{
fclose(fp);
}
// Begin by setting up our usage environment:
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
// Begin by creating a "RTSPClient" object. Note that there is a separate "RTSPClient" object for each stream that we wish
// to receive (even if more than stream uses the same "rtsp://" URL).
ourRTSPClient* rtspClient = ourRTSPClient::createNew(*env, rtspURL, RTSP_CLIENT_VERBOSITY_LEVEL, "SCBT BackChannel");
if (rtspClient == NULL) {
*env << "Failed to create a RTSP client for URL \"" << rtspURL << "\": " << env->getResultMsg() << "\n";
env->reclaim(); env = NULL;
delete scheduler; scheduler = NULL;
return -2;
}
rtspClient->bRequireBackChannel = bEnableBackChannel;
// Next, send a RTSP "DESCRIBE" command, to get a SDP description for the stream.
// Note that this command - like all RTSP commands - is sent asynchronously; we do not block, waiting for a response.
// Instead, the following function call returns immediately, and we handle the RTSP response later, from within the event loop:
Authenticator auth;
auth.setUsernameAndPassword(username, password);
rtspClient->sendDescribeCommand(continueAfterDESCRIBE, &auth);
//continueAfterSETUP(rtspClient, 0, new char[2]);
//startPlay(rtspClient);
// All subsequent activity takes place within the event loop:
env->taskScheduler().doEventLoop(&(rtspClient->scs.eventLoopWatchVariable));
// This function call does not return, unless, at some point in time, "eventLoopWatchVariable" gets set to something non-zero.
// If you choose to continue the application past this point (i.e., if you comment out the "return 0;" statement above),
// and if you don't intend to do anything more with the "TaskScheduler" and "UsageEnvironment" objects,
// then you can also reclaim the (small) memory used by these objects by uncommenting the following code:
env->reclaim(); env = NULL;
delete scheduler; scheduler = NULL;
return 0;
}
示例4: output
// Send "buffer" to every registered destination with the given TTL, then
// relay it to any local members.  Returns True on success; on failure,
// records a "Groupsock write failed: ..." result message and returns False.
Boolean Groupsock::output(UsageEnvironment& env, u_int8_t ttlToSend,
unsigned char* buffer, unsigned bufferSize,
DirectedNetInterface* interfaceNotToFwdBackTo) {
do {
// First, do the datagram send, to each destination:
Boolean writeSuccess = True;
for (destRecord* dests = fDests; dests != NULL; dests = dests->fNext) {
if (!write(dests->fGroupEId.groupAddress().s_addr, dests->fPort, ttlToSend,
buffer, bufferSize)) {
// Fix: the original fprintf passed strerror(errno) as the "%d"
// argument and supplied nothing for "%s" (undefined behavior), and
// the log was unreachable (it tested for -1 after "!" had already
// filtered out every non-zero result).  Log the OS error properly:
fprintf(stderr, "errno = %d, errorstring = %s\n", errno, strerror(errno));
writeSuccess = False;
break;
}
}
if (!writeSuccess) break;
statsOutgoing.countPacket(bufferSize);
statsGroupOutgoing.countPacket(bufferSize);
// Then, forward to our members:
int numMembers = 0;
if (!members().IsEmpty()) {
numMembers =
outputToAllMembersExcept(interfaceNotToFwdBackTo,
ttlToSend, buffer, bufferSize,
ourIPAddress(env));
if (numMembers < 0) break;
}
if (DebugLevel >= 3) {
env << *this << ": wrote " << bufferSize << " bytes, ttl "
<< (unsigned)ttlToSend;
if (numMembers > 0) {
env << "; relayed to " << numMembers << " members";
}
env << "\n";
}
return True;
} while (0);
if (DebugLevel >= 0) { // this is a fatal error
// Copy the current message first: setResultMsg() may release the buffer
// that getResultMsg() points into (same pattern as the other overload).
char* oldMsg = strDup(env.getResultMsg());
env.setResultMsg("Groupsock write failed: ", oldMsg);
delete[] oldMsg;
}
return False;
}
示例5: openURL
// Create an RTSP client for "rtspURL" and kick off the asynchronous
// DESCRIBE exchange; the SDP response is processed later, from within the
// event loop, by "continueAfterDESCRIBE".
void openURL(UsageEnvironment& env, char const* progName, char const* rtspURL) {
// A separate "RTSPClient" object is needed per stream, even when several
// streams share the same "rtsp://" URL:
RTSPClient* client = ourRTSPClient::createNew(env, rtspURL, RTSP_CLIENT_VERBOSITY_LEVEL, progName);
if (client == NULL) {
env << "Failed to create a RTSP client for URL \"" << rtspURL << "\": " << env.getResultMsg() << "\n";
return;
}
++rtspClientCount;
// The DESCRIBE command - like all RTSP commands - is sent asynchronously;
// this call returns immediately without blocking:
client->sendDescribeCommand(continueAfterDESCRIBE);
}
示例6: main
int main(int argc, char** argv) {
// Begin by setting up our usage environment:
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
// To implement client access control to the RTSP server, do the following:
authDB = new UserAuthenticationDatabase;
authDB->addUserRecord("username1", "password1"); // replace these with real strings
// Repeat the above with each <username>, <password> that you wish to allow
// access to the server.
#endif
// Create the RTSP server:
RTSPServer* rtspServer = RTSPServer::createNew(*env, 554, authDB);
if (rtspServer == NULL) {
*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
exit(1);
}
// Add live stream
WW_H264VideoSource * videoSource = 0;
ServerMediaSession * sms = ServerMediaSession::createNew(*env, "live", 0, "ww live test");
sms->addSubsession(WW_H264VideoServerMediaSubsession::createNew(*env, videoSource));
rtspServer->addServerMediaSession(sms);
char * url = rtspServer->rtspURL(sms);
*env << "using url \"" << url << "\"\n";
delete[] url;
// Run loop
env->taskScheduler().doEventLoop();
rtspServer->removeServerMediaSession(sms);
Medium::close(rtspServer);
env->reclaim();
delete scheduler;
return 1;
}
示例7: output
// Send "buffer" to each registered destination - using each destination's
// own port and TTL - then relay it to any local members.  Returns True on
// success; on failure, records "Groupsock write failed: ..." and returns False.
Boolean Groupsock::output(UsageEnvironment& env, unsigned char* buffer, unsigned bufferSize,
DirectedNetInterface* interfaceNotToFwdBackTo) {
do {
// Datagram send to every destination record:
Boolean sentOK = True;
for (destRecord* d = fDests; d != NULL; d = d->fNext) {
if (!write(d->fGroupEId.groupAddress().s_addr, d->fGroupEId.portNum(), d->fGroupEId.ttl(),
buffer, bufferSize)) {
sentOK = False;
break;
}
}
if (!sentOK) break;
statsOutgoing.countPacket(bufferSize);
statsGroupOutgoing.countPacket(bufferSize);
// Relay to our members (skipping the interface it arrived on):
int numMembers = 0;
if (!members().IsEmpty()) {
numMembers = outputToAllMembersExcept(interfaceNotToFwdBackTo,
ttl(), buffer, bufferSize,
ourIPAddress(env));
if (numMembers < 0) break;
}
if (DebugLevel >= 3) {
env << *this << ": wrote " << bufferSize << " bytes, ttl " << (unsigned)ttl();
if (numMembers > 0) {
env << "; relayed to " << numMembers << " members";
}
env << "\n";
}
return True;
} while (0);
if (DebugLevel >= 0) { // this is a fatal error
// Copy the message first: setResultMsg() may invalidate the buffer that
// getResultMsg() returned.
UsageEnvironment::MsgString msg = strDup(env.getResultMsg());
env.setResultMsg("Groupsock write failed: ", msg);
delete[] (char*)msg;
}
return False;
}
示例8: main
// -----------------------------------------
// entry point
// -----------------------------------------
// Parses command-line options, opens a V4L2 capture device, and serves it
// over RTSP via live555.  NOTE: this listing is truncated by the source
// page -- the function body continues past the omission marker below.
int main(int argc, char** argv)
{
// default parameters
const char *dev_name = "/dev/video0";
int format = V4L2_PIX_FMT_H264;
int width = 640;
int height = 480;
int queueSize = 10;
int fps = 25;
unsigned short rtpPortNum = 20000;
unsigned short rtcpPortNum = rtpPortNum+1;
unsigned char ttl = 5;
struct in_addr destinationAddress;
unsigned short rtspPort = 8554;
unsigned short rtspOverHTTPPort = 0;
bool multicast = false;
int verbose = 0;
std::string outputFile;
bool useMmap = false;
// decode parameters ("v::" takes an optional argument, so "-vv" raises
// verbosity twice)
int c = 0;
while ((c = getopt (argc, argv, "hW:H:Q:P:F:v::O:T:mM")) != -1)
{
switch (c)
{
case 'O': outputFile = optarg; break;
case 'v': verbose = 1; if (optarg && *optarg=='v') verbose++; break;
case 'm': multicast = true; break;
case 'W': width = atoi(optarg); break;
case 'H': height = atoi(optarg); break;
case 'Q': queueSize = atoi(optarg); break;
case 'P': rtspPort = atoi(optarg); break;
case 'T': rtspOverHTTPPort = atoi(optarg); break;
case 'F': fps = atoi(optarg); break;
case 'M': useMmap = true; break;
case 'h':
{
// NOTE(review): the usage text below disagrees with the actual option
// mapping: '-H' sets the capture height and '-T' sets the RTSP-over-HTTP
// port, while the text advertises "-H port" for the latter -- confirm
// which is intended before changing either.
std::cout << argv[0] << " [-v[v]][-m] [-P RTSP port][-P RTSP/HTTP port][-Q queueSize] [-M] [-W width] [-H height] [-F fps] [-O file] [device]" << std::endl;
std::cout << "\t -v : verbose " << std::endl;
std::cout << "\t -v v : very verbose " << std::endl;
std::cout << "\t -Q length: Number of frame queue (default "<< queueSize << ")" << std::endl;
std::cout << "\t -O file : Dump capture to a file" << std::endl;
std::cout << "\t RTSP options :" << std::endl;
std::cout << "\t -m : Enable multicast output" << std::endl;
std::cout << "\t -P port : RTSP port (default "<< rtspPort << ")" << std::endl;
std::cout << "\t -H port : RTSP over HTTP port (default "<< rtspOverHTTPPort << ")" << std::endl;
std::cout << "\t V4L2 options :" << std::endl;
std::cout << "\t -M : V4L2 capture using memory mapped buffers (default use read interface)" << std::endl;
std::cout << "\t -F fps : V4L2 capture framerate (default "<< fps << ")" << std::endl;
std::cout << "\t -W width : V4L2 capture width (default "<< width << ")" << std::endl;
std::cout << "\t -H height: V4L2 capture height (default "<< height << ")" << std::endl;
std::cout << "\t device : V4L2 capture device (default "<< dev_name << ")" << std::endl;
exit(0);
}
}
}
// Any remaining positional argument is the capture device path:
if (optind<argc)
{
dev_name = argv[optind];
}
// create live555 environment
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
// create RTSP server
RTSPServer* rtspServer = RTSPServer::createNew(*env, rtspPort);
if (rtspServer == NULL)
{
*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
}
else
{
// set http tunneling
if (rtspOverHTTPPort)
{
rtspServer->setUpTunnelingOverHTTP(rtspOverHTTPPort);
}
// Init capture: choose mmap or read() access depending on -M
*env << "Create V4L2 Source..." << dev_name << "\n";
V4L2DeviceParameters param(dev_name,format,width,height,fps,verbose);
V4L2Device* videoCapture = NULL;
if (useMmap)
{
videoCapture = V4L2MMAPDeviceSource::createNew(param);
}
else
{
videoCapture = V4L2READDeviceSource::createNew(param);
}
V4L2DeviceSource* videoES = V4L2DeviceSource::createNew(*env, param, videoCapture, outputFile, queueSize, verbose);
if (videoES == NULL)
{
*env << "Unable to create source for device " << dev_name << "\n";
}
//......... part of the code omitted here .........
示例9: main
// LIVE555 Media Server entry point: creates a DynamicRTSPServer (on port
// 554/8554, or 654/8654 in the VANLINK playback build), prints usage
// banners, and runs the event loop forever.
int main(int argc, char** argv) {
// Begin by setting up our usage environment:
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
// To implement client access control to the RTSP server, do the following:
authDB = new UserAuthenticationDatabase;
authDB->addUserRecord("username1", "password1"); // replace these with real strings
// Repeat the above with each <username>, <password> that you wish to allow
// access to the server.
#endif
// Create the RTSP server. Try first with the default port number (554),
// and then with the alternative port number (8554):
RTSPServer* rtspServer;
#ifdef VANLINK_DVR_RTSP_PLAYBACK
// VANLINK build uses ports 654/8654 instead of the RTSP defaults.
portNumBits rtspServerPortNum = 654;//add by sxh rtsp
rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
if (rtspServer == NULL) {
rtspServerPortNum = 8654;
rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
}
if (rtspServer == NULL) {
*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
exit(1);
}
#else
portNumBits rtspServerPortNum = 554;
rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
if (rtspServer == NULL) {
rtspServerPortNum = 8554;
rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
}
if (rtspServer == NULL) {
*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
exit(1);
}
#endif
// Print the startup banner and supported file-type suffixes:
*env << "LIVE555 Media Server\n";
*env << "\tversion " << MEDIA_SERVER_VERSION_STRING
<< " (LIVE555 Streaming Media library version "
<< LIVEMEDIA_LIBRARY_VERSION_STRING << ").\n";
char* urlPrefix = rtspServer->rtspURLPrefix();
*env << "Play streams from this server using the URL\n\t"
<< urlPrefix << "<filename>\nwhere <filename> is a file present in the current directory.\n";
*env << "Each file's type is inferred from its name suffix:\n";
*env << "\t\".aac\" => an AAC Audio (ADTS format) file\n";
*env << "\t\".amr\" => an AMR Audio file\n";
*env << "\t\".m4e\" => a MPEG-4 Video Elementary Stream file\n";
*env << "\t\".dv\" => a DV Video file\n";
*env << "\t\".mp3\" => a MPEG-1 or 2 Audio file\n";
*env << "\t\".mpg\" => a MPEG-1 or 2 Program Stream (audio+video) file\n";
*env << "\t\".ts\" => a MPEG Transport Stream file\n";
*env << "\t\t(a \".tsx\" index file - if present - provides server 'trick play' support)\n";
*env << "\t\".wav\" => a WAV Audio file\n";
*env << "See http://www.live555.com/mediaServer/ for additional documentation.\n";
#if 0 // RTSP-over-HTTP tunneling is not yet working
// Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
// Try first with the default HTTP port (80), and then with the alternative HTTP
// port number (8000).
RTSPOverHTTPServer* rtspOverHTTPServer;
portNumBits httpServerPortNum = 80;
rtspOverHTTPServer = RTSPOverHTTPServer::createNew(*env, httpServerPortNum, rtspServerPortNum);
if (rtspOverHTTPServer == NULL) {
httpServerPortNum = 8000;
rtspOverHTTPServer = RTSPOverHTTPServer::createNew(*env, httpServerPortNum, rtspServerPortNum);
}
if (rtspOverHTTPServer == NULL) {
*env << "(No server for RTSP-over-HTTP tunneling was created.)\n";
} else {
*env << "(We use port " << httpServerPortNum << " for RTSP-over-HTTP tunneling.)\n";
}
#endif
env->taskScheduler().doEventLoop(); // does not return
return 0; // only to prevent compiler warning
}
示例10: iter
// MPlayer demuxer entry point for RTP streams: obtains an SDP description
// (directly from the stream's 'priv' field, or by contacting an RTSP/SIP
// server), builds a live555 MediaSession from it, and initiates RTP
// receivers for the first audio and first video subsession.
// NOTE: this listing is truncated by the source page -- the function body
// continues past the omission marker below.
extern "C" demuxer_t* demux_open_rtp(demuxer_t* demuxer) {
// NOTE(review): 'opts' and 'success' are unused in the visible portion of
// this function -- presumably referenced in the omitted remainder; confirm.
struct MPOpts *opts = demuxer->opts;
Boolean success = False;
do {
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
if (scheduler == NULL) break;
UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
if (env == NULL) break;
RTSPClient* rtspClient = NULL;
SIPClient* sipClient = NULL;
if (demuxer == NULL || demuxer->stream == NULL) break; // shouldn't happen
demuxer->stream->eof = 0; // just in case
// Look at the stream's 'priv' field to see if we were initiated
// via a SDP description:
char* sdpDescription = (char*)(demuxer->stream->priv);
if (sdpDescription == NULL) {
// We weren't given a SDP description directly, so assume that
// we were given a RTSP or SIP URL:
char const* protocol = demuxer->stream->streaming_ctrl->url->protocol;
char const* url = demuxer->stream->streaming_ctrl->url->url;
extern int verbose;
if (strcmp(protocol, "rtsp") == 0) {
rtspClient = RTSPClient::createNew(*env, verbose, "MPlayer");
if (rtspClient == NULL) {
fprintf(stderr, "Failed to create RTSP client: %s\n",
env->getResultMsg());
break;
}
sdpDescription = openURL_rtsp(rtspClient, url);
} else { // SIP
unsigned char desiredAudioType = 0; // PCMU (use 3 for GSM)
sipClient = SIPClient::createNew(*env, desiredAudioType, NULL,
verbose, "MPlayer");
if (sipClient == NULL) {
fprintf(stderr, "Failed to create SIP client: %s\n",
env->getResultMsg());
break;
}
sipClient->setClientStartPortNum(8000);
sdpDescription = openURL_sip(sipClient, url);
}
if (sdpDescription == NULL) {
fprintf(stderr, "Failed to get a SDP description from URL \"%s\": %s\n",
url, env->getResultMsg());
break;
}
}
// Now that we have a SDP description, create a MediaSession from it:
MediaSession* mediaSession = MediaSession::createNew(*env, sdpDescription);
if (mediaSession == NULL) break;
// Create a 'RTPState' structure containing the state that we just created,
// and store it in the demuxer's 'priv' field, for future reference:
RTPState* rtpState = new RTPState;
rtpState->sdpDescription = sdpDescription;
rtpState->rtspClient = rtspClient;
rtpState->sipClient = sipClient;
rtpState->mediaSession = mediaSession;
rtpState->audioBufferQueue = rtpState->videoBufferQueue = NULL;
rtpState->flags = 0;
rtpState->firstSyncTime.tv_sec = rtpState->firstSyncTime.tv_usec = 0;
demuxer->priv = rtpState;
int audiofound = 0, videofound = 0;
// Create RTP receivers (sources) for each subsession; only the first
// audio and first video subsession are used, the rest are skipped:
MediaSubsessionIterator iter(*mediaSession);
MediaSubsession* subsession;
unsigned desiredReceiveBufferSize;
while ((subsession = iter.next()) != NULL) {
// Ignore any subsession that's not audio or video:
if (strcmp(subsession->mediumName(), "audio") == 0) {
if (audiofound) {
fprintf(stderr, "Additional subsession \"audio/%s\" skipped\n", subsession->codecName());
continue;
}
desiredReceiveBufferSize = 100000;
} else if (strcmp(subsession->mediumName(), "video") == 0) {
if (videofound) {
fprintf(stderr, "Additional subsession \"video/%s\" skipped\n", subsession->codecName());
continue;
}
desiredReceiveBufferSize = 2000000;
} else {
continue;
}
// Honor a user-requested client port, if any:
if (rtsp_port)
subsession->setClientPortNum (rtsp_port);
if (!subsession->initiate()) {
fprintf(stderr, "Failed to initiate \"%s/%s\" RTP subsession: %s\n", subsession->mediumName(), subsession->codecName(), env->getResultMsg());
} else {
fprintf(stderr, "Initiated \"%s/%s\" RTP subsession on port %d\n", subsession->mediumName(), subsession->codecName(), subsession->clientPortNum());
//......... part of the code omitted here .........
示例11: main
//......... part of the code omitted here .........
// (This listing is truncated at both ends by the source page: option
// parsing precedes this fragment, and the capture loop follows it.)
// init logger
initLogger(verbose);
// create live555 environment
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
// split multicast info: "maddr" is parsed as "ip:port"; a random SSM
// address is used when no ip was given
std::istringstream is(maddr);
std::string ip;
getline(is, ip, ':');
struct in_addr destinationAddress;
destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
if (!ip.empty())
{
destinationAddress.s_addr = inet_addr(ip.c_str());
}
std::string port;
getline(is, port, ':');
unsigned short rtpPortNum = 20000;
if (!port.empty())
{
rtpPortNum = atoi(port.c_str());
}
unsigned short rtcpPortNum = rtpPortNum+1;
unsigned char ttl = 5;
// create RTSP server
RTSPServer* rtspServer = createRTSPServer(*env, rtspPort, rtspOverHTTPPort, timeout);
if (rtspServer == NULL)
{
LOG(ERROR) << "Failed to create RTSP server: " << env->getResultMsg();
}
else
{
// Open each requested V4L2 device and count how many succeeded:
int nbSource = 0;
std::list<std::string>::iterator devIt;
for ( devIt=devList.begin() ; devIt!=devList.end() ; ++devIt)
{
std::string deviceName(*devIt);
// Init capture
LOG(NOTICE) << "Create V4L2 Source..." << deviceName;
V4L2DeviceParameters param(deviceName.c_str(),format,width,height,fps, verbose);
V4l2Capture* videoCapture = V4l2DeviceFactory::CreateVideoCapure(param, useMmap);
if (videoCapture)
{
nbSource++;
format = videoCapture->getFormat();
int outfd = -1;
V4l2Output* out = NULL;
if (!outputFile.empty())
{
V4L2DeviceParameters outparam(outputFile.c_str(), videoCapture->getFormat(), videoCapture->getWidth(), videoCapture->getHeight(), 0,verbose);
// NOTE(review): this inner "V4l2Output* out" shadows the outer "out"
// declared above, which therefore stays NULL (the handle is only
// reachable via outfd) -- likely a leak/bug; confirm against the
// omitted remainder of this function.
V4l2Output* out = V4l2DeviceFactory::CreateVideoOutput(outparam, useMmap);
if (out != NULL)
{
outfd = out->getFd();
}
}
LOG(NOTICE) << "Start V4L2 Capture..." << deviceName;
if (!videoCapture->captureStart())
示例12: main
// LIVE555 Media Server entry point: creates a DynamicRTSPServer (port 554,
// falling back to 8554), prints the supported file-suffix banner, enables
// RTSP-over-HTTP tunneling when possible, and runs the event loop forever.
int main(int argc, char** argv) {
// Begin by setting up our usage environment:
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
// To implement client access control to the RTSP server, do the following:
authDB = new UserAuthenticationDatabase;
authDB->addUserRecord("username1", "password1"); // replace these with real strings
// Repeat the above with each <username>, <password> that you wish to allow
// access to the server.
#endif
// Create the RTSP server. Try first with the default port number (554),
// and then with the alternative port number (8554):
RTSPServer* rtspServer;
portNumBits rtspServerPortNum = 554; // first try the default RTSP port 554
rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
if (rtspServer == NULL) { // if port 554 failed, fall back to port 8554
rtspServerPortNum = 8554;
rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
}
if (rtspServer == NULL) {
*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
exit(1);
}
// Print the startup banner and supported file-type suffixes:
*env << "LIVE555 Media Server\n";
*env << "\tversion " << MEDIA_SERVER_VERSION_STRING
<< " (LIVE555 Streaming Media library version "
<< LIVEMEDIA_LIBRARY_VERSION_STRING << ").\n";
char* urlPrefix = rtspServer->rtspURLPrefix();
*env << "Play streams from this server using the URL\n\t"
<< urlPrefix << "<filename>\nwhere <filename> is a file present in the current directory.\n";
*env << "Each file's type is inferred from its name suffix:\n";
*env << "\t\".264\" => a H.264 Video Elementary Stream file\n";
*env << "\t\".265\" => a H.265 Video Elementary Stream file\n";
*env << "\t\".aac\" => an AAC Audio (ADTS format) file\n";
*env << "\t\".ac3\" => an AC-3 Audio file\n";
*env << "\t\".amr\" => an AMR Audio file\n";
*env << "\t\".dv\" => a DV Video file\n";
*env << "\t\".m4e\" => a MPEG-4 Video Elementary Stream file\n";
*env << "\t\".mkv\" => a Matroska audio+video+(optional)subtitles file\n";
*env << "\t\".mp3\" => a MPEG-1 or 2 Audio file\n";
*env << "\t\".mpg\" => a MPEG-1 or 2 Program Stream (audio+video) file\n";
*env << "\t\".ogg\" or \".ogv\" or \".opus\" => an Ogg audio and/or video file\n";
*env << "\t\".ts\" => a MPEG Transport Stream file\n";
*env << "\t\t(a \".tsx\" index file - if present - provides server 'trick play' support)\n";
*env << "\t\".vob\" => a VOB (MPEG-2 video with AC-3 audio) file\n";
*env << "\t\".wav\" => a WAV Audio file\n";
*env << "\t\".webm\" => a WebM audio(Vorbis)+video(VP8) file\n";
*env << "See http://www.live555.com/mediaServer/ for additional documentation.\n";
// Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
// Try first with the default HTTP port (80), and then with the alternative HTTP
// port numbers (8000 and 8080).
if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080)) {
*env << "(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling, or for HTTP live streaming (for indexed Transport Stream files only).)\n";
} else {
*env << "(RTSP-over-HTTP tunneling is not available.)\n";
}
env->taskScheduler().doEventLoop(); // does not return
return 0; // only to prevent compiler warning
}
示例13: srcPort
// SIPClient constructor: copies the MIME subtype and application name,
// determines our own IP address, creates the UDP socket used for SIP
// signaling (discovering its ephemeral port, or falling back to 5060),
// and builds the "User-Agent:" header string.
SIPClient::SIPClient(UsageEnvironment& env,
unsigned char desiredAudioRTPPayloadFormat,
char const* mimeSubtype,
int verbosityLevel, char const* applicationName)
: Medium(env),
fT1(500000 /* 500 ms */),
fDesiredAudioRTPPayloadFormat(desiredAudioRTPPayloadFormat),
fVerbosityLevel(verbosityLevel), fCSeq(0),
fUserAgentHeaderStr(NULL), fUserAgentHeaderStrLen(0),
fURL(NULL), fURLSize(0),
fToTagStr(NULL), fToTagStrSize(0),
fUserName(NULL), fUserNameSize(0),
fInviteSDPDescription(NULL), fInviteSDPDescriptionReturned(NULL),
fInviteCmd(NULL), fInviteCmdSize(0) {
// Normalize NULL inputs to empty strings before duplicating them:
if (mimeSubtype == NULL) mimeSubtype = "";
fMIMESubtype = strDup(mimeSubtype);
fMIMESubtypeSize = strlen(fMIMESubtype);
if (applicationName == NULL) applicationName = "";
fApplicationName = strDup(applicationName);
fApplicationNameSize = strlen(fApplicationName);
struct in_addr ourAddress;
ourAddress.s_addr = ourIPAddress(env); // hack
fOurAddressStr = strDup(AddressString(ourAddress).val());
fOurAddressStrSize = strlen(fOurAddressStr);
// Create our signaling socket on an ephemeral port (port 0, TTL 255).
// NOTE(review): plain "new" cannot return NULL (it throws), so this NULL
// check presumably guards a nothrow/overloaded allocator -- confirm.
fOurSocket = new Groupsock(env, ourAddress, 0, 255);
if (fOurSocket == NULL) {
env << "ERROR: Failed to create socket for addr "
<< fOurAddressStr << ": "
<< env.getResultMsg() << "\n";
}
// Now, find out our source port number. Hack: Do this by first trying to
// send a 0-length packet, so that the "getSourcePort()" call will work.
fOurSocket->output(envir(), (unsigned char*)"", 0);
Port srcPort(0);
getSourcePort(env, fOurSocket->socketNum(), srcPort);
if (srcPort.num() != 0) {
fOurPortNum = ntohs(srcPort.num());
} else {
// No luck. Try again using a default port number:
fOurPortNum = 5060;
delete fOurSocket;
fOurSocket = new Groupsock(env, ourAddress, fOurPortNum, 255);
if (fOurSocket == NULL) {
env << "ERROR: Failed to create socket for addr "
<< fOurAddressStr << ", port "
<< fOurPortNum << ": "
<< env.getResultMsg() << "\n";
}
}
// Set the "User-Agent:" header to use in each request:
char const* const libName = "LIVE555 Streaming Media v";
char const* const libVersionStr = LIVEMEDIA_LIBRARY_VERSION_STRING;
char const* libPrefix; char const* libSuffix;
// (The NULL arm below is unreachable: applicationName was replaced with ""
// above when NULL; the empty-string test still does the intended work.)
if (applicationName == NULL || applicationName[0] == '\0') {
applicationName = libPrefix = libSuffix = "";
} else {
libPrefix = " (";
libSuffix = ")";
}
unsigned userAgentNameSize
= fApplicationNameSize + strlen(libPrefix) + strlen(libName) + strlen(libVersionStr) + strlen(libSuffix) + 1;
char* userAgentName = new char[userAgentNameSize];
sprintf(userAgentName, "%s%s%s%s%s",
applicationName, libPrefix, libName, libVersionStr, libSuffix);
setUserAgentString(userAgentName);
delete[] userAgentName;
reset();
}
示例14: main
// Multi-process streaming server entry point: configures audio parameters
// from the detected sample rate, parses -m/-s/-a/-v flags, then fork()s a
// set of child processes (more when SVC is enabled).
// NOTE: this listing is truncated by the source page -- the function body
// continues past the omission marker below.
int main(int argc, char** argv) {
init_signals();
setpriority(PRIO_PROCESS, 0, 0);
int IsSilence = 0;
int svcEnable = 0;
int cnt=0;
// NOTE(review): activePortCnt is unused in the visible portion -- presumably
// referenced in the omitted remainder; confirm.
int activePortCnt=0;
// Pick audio bitrate/frequency based on the device's sample rate:
if( GetSampleRate() == 16000 )
{
audioOutputBitrate = 128000;
audioSamplingFrequency = 16000;
}else{
audioOutputBitrate = 64000;
audioSamplingFrequency = 8000;
}
// Begin by setting up our usage environment:
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
int msg_type, video_type;
APPROInput* MjpegInputDevice = NULL;
APPROInput* H264InputDevice = NULL;
APPROInput* Mpeg4InputDevice = NULL;
// NOTE(review): child is declared with 4 elements, but indices 4..6 are
// written below when svcEnable is set -- out-of-bounds array writes
// (undefined behavior). The array size should be at least 7.
static pid_t child[4] = {
-1,-1,-1,-1
};
StreamingMode streamingMode = STREAMING_UNICAST;
netAddressBits multicastAddress = 0;//our_inet_addr("224.1.4.6");
portNumBits videoRTPPortNum = 0;
portNumBits audioRTPPortNum = 0;
IsSilence = 0;
svcEnable = 0;
audioType = AUDIO_G711;
streamingMode = STREAMING_UNICAST;
// Command-line flags: -m multicast, -s silence, -a AAC audio, -v SVC:
for( cnt = 1; cnt < argc ;cnt++ )
{
if( strcmp( argv[cnt],"-m" )== 0 )
{
streamingMode = STREAMING_MULTICAST_SSM;
}
if( strcmp( argv[cnt],"-s" )== 0 )
{
IsSilence = 1;
}
if( strcmp( argv[cnt],"-a" )== 0 )
{
audioType = AUDIO_AAC;
}
if( strcmp( argv[cnt],"-v" )== 0 )
{
svcEnable = 1;
}
}
#if 0
printf("###########IsSilence = %d ################\n",IsSilence);
printf("###########streamingMode = %d ################\n",streamingMode);
printf("###########audioType = %d ################\n",audioType);
printf("###########svcEnable = %d ################\n",svcEnable);
#endif
// Fork one child per stream; each branch runs only in the parent of the
// previous fork, so exactly 4 (or 7 with SVC) processes result:
child[0] = fork();
if( child[0] != 0 )
{
child[1] = fork();
}
if( child[0] != 0 && child[1] != 0 )
{
child[2] = fork();
}
if( child[0] != 0 && child[1] != 0 && child[2] != 0 )
{
child[3] = fork();
}
if(svcEnable) {
if( child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0)
{
child[4] = fork();
}
if( child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0 && child[4] != 0)
{
child[5] = fork();
}
if( child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0 && child[4] != 0 && child[5] != 0)
{
child[6] = fork();
}
if( child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0 && child[4] != 0 && child[5] != 0 && child[6] != 0)
//......... part of the code omitted here .........
示例15: setupDarwinStreaming
//......... part of the code omitted here .........
// (This listing is truncated at the start by the source page: the function
// header and the audio-source setup precede this fragment. The visible
// part finishes the audio RTCP setup, builds the video source/sink chain,
// and connects the injector to the remote Darwin Streaming Server.)
totalSessionBandwidthAudio, CNAME,
sinkAudio, NULL /* we're a server */);
// Note: This starts RTCP running automatically
// Add these to our 'Darwin injector':
injector->addStream(sinkAudio, rtcpAudio);
}
}
/******************end audio***********************/
/******************video***********************/
if (videoFormat != VFMT_NONE) {
// Create the video source:
if (packageFormat == PFMT_TRANSPORT_STREAM) {
// Mux video (and audio, if present) into a single MPEG-2 Transport Stream:
MPEG2TransportStreamFromESSource* tsSource
= MPEG2TransportStreamFromESSource::createNew(env);
tsSource->addNewVideoSource(inputDevice.videoSource(), 2);
if (sourceAudio != NULL) tsSource->addNewAudioSource(sourceAudio, 2);
// Gather the Transport packets into network packet-sized chunks:
sourceVideo = MPEG2TransportStreamAccumulator::createNew(env, tsSource);
sourceAudio = NULL;
} else {
// Elementary-stream packaging: pick a framer matching the video codec:
switch (videoFormat) {
case VFMT_NONE: // not used
break;
case VFMT_MJPEG: {
sourceVideo = WISJPEGStreamSource::createNew(inputDevice.videoSource());
break;
}
case VFMT_MPEG1:
case VFMT_MPEG2: {
sourceVideo = MPEG1or2VideoStreamDiscreteFramer::createNew(env, inputDevice.videoSource());
break;
}
case VFMT_MPEG4: {
sourceVideo = MPEG4VideoStreamDiscreteFramer::createNew(env, inputDevice.videoSource());
break;
}
}
}
// Create 'groupsocks' for RTP and RTCP.
// (Note: Because we will actually be streaming through a remote Darwin server,
// via TCP, we just use dummy destination addresses, port numbers, and TTLs here.)
struct in_addr dummyDestAddress;
dummyDestAddress.s_addr = 0;
rtpGroupsockVideo = new Groupsock(env, dummyDestAddress, 0, 0);
rtcpGroupsockVideo = new Groupsock(env, dummyDestAddress, 0, 0);
// Create a RTP sink for the video stream:
unsigned char payloadFormatCode = 97; // if dynamic
setVideoRTPSinkBufferSize();
if (packageFormat == PFMT_TRANSPORT_STREAM) {
sinkVideo = SimpleRTPSink::createNew(env, rtpGroupsockVideo,
33, 90000, "video", "mp2t",
1, True, False/*no 'M' bit*/);
} else {
switch (videoFormat) {
case VFMT_NONE: // not used
break;
case VFMT_MJPEG: {
sinkVideo = JPEGVideoRTPSink::createNew(env, rtpGroupsockVideo);
break;
}
case VFMT_MPEG1:
case VFMT_MPEG2: {
sinkVideo = MPEG1or2VideoRTPSink::createNew(env, rtpGroupsockVideo);
break;
}
case VFMT_MPEG4: {
sinkVideo = MPEG4ESVideoRTPSink::createNew(env, rtpGroupsockVideo, payloadFormatCode);
break;
}
}
}
// Create (and start) a 'RTCP instance' for this RTP sink:
unsigned totalSessionBandwidthVideo = (videoBitrate+500)/1000; // in kbps; for RTCP b/w share
rtcpVideo = RTCPInstance::createNew(env, rtcpGroupsockVideo,
totalSessionBandwidthVideo, CNAME,
sinkVideo, NULL /* we're a server */);
// Note: This starts RTCP running automatically
// Add these to our 'Darwin injector':
injector->addStream(sinkVideo, rtcpVideo);
}
/******************end video***********************/
// Next, specify the destination Darwin Streaming Server:
char const* remoteStreamName = "test.sdp";//#####@@@@@
if (!injector->setDestination(remoteDSSNameOrAddress, remoteStreamName,
applicationName, "LIVE555 Streaming Media")) {
env << "Failed to connect to remote Darwin Streaming Server: " << env.getResultMsg() << "\n";
exit(1);
}
env << "Play this stream (from the Darwin Streaming Server) using the URL:\n"
<< "\trtsp://" << remoteDSSNameOrAddress << "/" << remoteStreamName << "\n";
}