本文整理汇总了C++中OMXClient::disconnect方法的典型用法代码示例。如果您正苦于以下问题:C++ OMXClient::disconnect方法的具体用法?C++ OMXClient::disconnect怎么用?C++ OMXClient::disconnect使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类OMXClient
的用法示例。
在下文中一共展示了OMXClient::disconnect方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: main
int main(int argc, char **argv) {
android::ProcessState::self()->startThreadPool();
OMXClient client;
CHECK_EQ(client.connect(), OK);
const int32_t kSampleRate = 22050;
const int32_t kNumChannels = 2;
sp<MediaSource> audioSource = new SineSource(kSampleRate, kNumChannels);
#if 0
sp<MediaPlayerBase::AudioSink> audioSink;
AudioPlayer *player = new AudioPlayer(audioSink);
player->setSource(audioSource);
player->start();
sleep(10);
player->stop();
#endif
sp<MetaData> encMeta = new MetaData;
encMeta->setCString(kKeyMIMEType,
1 ? MEDIA_MIMETYPE_AUDIO_AMR_WB : MEDIA_MIMETYPE_AUDIO_AAC);
encMeta->setInt32(kKeySampleRate, kSampleRate);
encMeta->setInt32(kKeyChannelCount, kNumChannels);
encMeta->setInt32(kKeyMaxInputSize, 8192);
sp<MediaSource> encoder =
OMXCodec::Create(client.interface(), encMeta, true, audioSource);
encoder->start();
int32_t n = 0;
status_t err;
MediaBuffer *buffer;
while ((err = encoder->read(&buffer)) == OK) {
printf(".");
fflush(stdout);
buffer->release();
buffer = NULL;
if (++n == 100) {
break;
}
}
printf("$\n");
encoder->stop();
client.disconnect();
return 0;
}
示例2: VideoEditorVideoEncoder_getDSI
/**
 * Extracts the encoder's decoder-specific information (DSI / codec-config
 * header) and stores it in pEncoderContext->mHeader.
 *
 * Strategy: instantiate the OMX encoder, feed it a single fake (zero-filled)
 * frame followed by EOS, then read back the first output buffer, which must
 * carry the kKeyIsCodecConfig flag. For H264 the payload is reformatted via
 * buildAVCCodecSpecificData(); for MPEG4 it is copied verbatim.
 *
 * @param pContext  encoder context; must be in the CREATED state and have an
 *                  empty mHeader
 * @param metaData  encoder configuration (stride/height/framerate, ...)
 * @return M4NO_ERROR on success, an M4ERR_* code otherwise
 *
 * NOTE(review): VIDEOEDITOR_CHECK is assumed to set 'err' and jump to the
 * cleanUp label on failure — confirm against the macro's definition. The
 * statement order below is therefore load-bearing and kept unchanged; the
 * only code changes are the removal of two stray double semicolons.
 */
M4OSA_ERR VideoEditorVideoEncoder_getDSI(M4ENCODER_Context pContext,
        sp<MetaData> metaData) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
    status_t result = OK;
    int32_t nbBuffer = 0;          // storeBuffer() result; currently unused
    int32_t stride = 0;
    int32_t height = 0;
    int32_t framerate = 0;
    int32_t isCodecConfig = 0;
    size_t size = 0;
    uint32_t codecFlags = 0;
    MediaBuffer* inputBuffer = NULL;
    MediaBuffer* outputBuffer = NULL;
    sp<VideoEditorVideoEncoderSource> encoderSource = NULL;
    sp<MediaSource> encoder = NULL;   // was "NULL;;" — stray ';' removed
    OMXClient client;

    ALOGV("VideoEditorVideoEncoder_getDSI begin");
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != metaData.get(), M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
    VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE);

    // Create the encoder source
    encoderSource = VideoEditorVideoEncoderSource::Create(metaData);
    VIDEOEDITOR_CHECK(NULL != encoderSource.get(), M4ERR_STATE);

    // Connect to the OMX client
    result = client.connect();
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);

    // Create the OMX codec
    // VIDEOEDITOR_FORCECODEC MUST be defined here
    codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC;
    encoder = OMXCodec::Create(client.interface(), metaData, true,
            encoderSource, NULL, codecFlags);
    VIDEOEDITOR_CHECK(NULL != encoder.get(), M4ERR_STATE);

    /**
     * Send fake frames and retrieve the DSI
     */
    // Send a fake frame to the source: one YUV420 frame worth of bytes
    // (stride * height * 3 / 2) timestamped at 0, then signal EOS.
    metaData->findInt32(kKeyStride, &stride);
    metaData->findInt32(kKeyHeight, &height);
    metaData->findInt32(kKeySampleRate, &framerate);
    size = (size_t)(stride*height*3)/2;
    inputBuffer = new MediaBuffer(size);
    inputBuffer->meta_data()->setInt64(kKeyTime, 0);
    nbBuffer = encoderSource->storeBuffer(inputBuffer);
    encoderSource->storeBuffer(NULL); // Signal EOS

    // Call read once to get the DSI
    result = encoder->start();   // was "start();;" — stray ';' removed
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
    result = encoder->read(&outputBuffer, NULL);
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
    // The very first buffer out of the encoder must be codec-config data.
    VIDEOEDITOR_CHECK(outputBuffer->meta_data()->findInt32(
        kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig, M4ERR_STATE);

    VIDEOEDITOR_CHECK(M4OSA_NULL == pEncoderContext->mHeader.pBuf, M4ERR_STATE);
    if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) {
        // For H264, format the DSI
        result = buildAVCCodecSpecificData(
            (uint8_t**)(&(pEncoderContext->mHeader.pBuf)),
            (size_t*)(&(pEncoderContext->mHeader.Size)),
            (const uint8_t*)outputBuffer->data() + outputBuffer->range_offset(),
            outputBuffer->range_length(), encoder->getFormat().get());
        outputBuffer->release();
        VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
    } else {
        // For MPEG4, just copy the DSI
        pEncoderContext->mHeader.Size =
            (M4OSA_UInt32)outputBuffer->range_length();
        SAFE_MALLOC(pEncoderContext->mHeader.pBuf, M4OSA_Int8,
            pEncoderContext->mHeader.Size, "Encoder header");
        memcpy((void *)pEncoderContext->mHeader.pBuf,
            (void *)((M4OSA_MemAddr8)(outputBuffer->data())+outputBuffer->range_offset()),
            pEncoderContext->mHeader.Size);
        outputBuffer->release();
    }

    result = encoder->stop();
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);

cleanUp:
    // Destroy the graph
    if ( encoder != NULL ) { encoder.clear(); }
    client.disconnect();
    if ( encoderSource != NULL ) { encoderSource.clear(); }
    if ( M4NO_ERROR == err ) {
        ALOGV("VideoEditorVideoEncoder_getDSI no error");
    } else {
        ALOGV("VideoEditorVideoEncoder_getDSI ERROR 0x%X", err);
    }
    ALOGV("VideoEditorVideoEncoder_getDSI end");
    return err;
}
示例3: main
// NOTE(review): truncated excerpt — the start of main() (declarations of
// width/height/level/profile/codec/fileName/nFrames/frameRateFps/bitRateBps/
// iFramesIntervalSeconds/colorFormat, the option string and the getopt loop
// header, plus usage()) lies outside this fragment.
//.........part of the code is omitted here.........
// Remaining command-line option cases.
case 'w':       // output width in pixels
{
width = atoi(optarg);
break;
}
case 't':       // output height in pixels
{
height = atoi(optarg);
break;
}
case 'l':       // codec level (-1 = unset, see below)
{
level = atoi(optarg);
break;
}
case 'p':       // codec profile (-1 = unset, see below)
{
profile = atoi(optarg);
break;
}
case 'v':       // video codec selector: only values 0..2 are valid
{
codec = atoi(optarg);
if (codec < 0 || codec > 2) {
usage(argv[0]);
}
break;
}
case 'h':
default:
{
usage(argv[0]);
break;
}
}
}
// Connect to the OMX IL layer; CHECK_EQ aborts the process on failure.
OMXClient client;
CHECK_EQ(client.connect(), OK);
status_t err = OK;
// Synthetic video source producing nFrames frames at frameRateFps.
sp<MediaSource> source =
new DummySource(width, height, nFrames, frameRateFps, colorFormat);
// Build the encoder output format from the parsed options
// (codec: 1 = MPEG4, 2 = H.263, anything else = AVC).
sp<MetaData> enc_meta = new MetaData;
switch (codec) {
case 1:
enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
break;
case 2:
enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
break;
default:
enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
break;
}
enc_meta->setInt32(kKeyWidth, width);
enc_meta->setInt32(kKeyHeight, height);
enc_meta->setInt32(kKeyFrameRate, frameRateFps);
enc_meta->setInt32(kKeyBitRate, bitRateBps);
enc_meta->setInt32(kKeyStride, width);
enc_meta->setInt32(kKeySliceHeight, height);
enc_meta->setInt32(kKeyIFramesInterval, iFramesIntervalSeconds);
enc_meta->setInt32(kKeyColorFormat, colorFormat);
// level/profile are only set when explicitly requested on the command line.
if (level != -1) {
enc_meta->setInt32(kKeyVideoLevel, level);
}
if (profile != -1) {
enc_meta->setInt32(kKeyVideoProfile, profile);
}
// Wrap the source in an OMX encoder and feed it into an MP4 writer.
sp<MediaSource> encoder =
OMXCodec::Create(
client.interface(), enc_meta, true /* createEncoder */, source);
sp<MPEG4Writer> writer = new MPEG4Writer(fileName);
writer->addSource(encoder);
// Busy-wait until the writer has drained the source, timing the whole run.
int64_t start = systemTime();
CHECK_EQ(OK, writer->start());
while (!writer->reachedEOS()) {
}
err = writer->stop();
int64_t end = systemTime();
fprintf(stderr, "$\n");
client.disconnect();
// ERROR_END_OF_STREAM is the normal termination status, not a failure.
if (err != OK && err != ERROR_END_OF_STREAM) {
fprintf(stderr, "record failed: %d\n", err);
return 1;
}
// NOTE(review): (end-start)/1000 implies systemTime() is in nanoseconds
// here — confirm the default clock of systemTime().
fprintf(stderr, "encoding %d frames in %lld us\n", nFrames, (end-start)/1000);
fprintf(stderr, "encoding speed is: %.2f fps\n", (nFrames * 1E9) / (end-start));
return 0;
}
示例4: main
// NOTE(review): truncated excerpt — this fragment starts inside a
// CHECK(source->getFormat()->findCString(...)) call within a per-track loop;
// the declarations of haveAudio/haveVideo/mediaSources/extractor/mediaSource/
// client/gWriteMP4/dumpStream/audioOnly/etc. are outside this view.
//.........part of the code is omitted here.........
kKeyMIMEType, &mime));
bool useTrack = false;
// MP4 remux mode: keep at most one audio and one video track.
if (!haveAudio && !strncasecmp("audio/", mime, 6)) {
haveAudio = true;
useTrack = true;
} else if (!haveVideo && !strncasecmp("video/", mime, 6)) {
haveVideo = true;
useTrack = true;
}
if (useTrack) {
mediaSources.push(source);
if (haveAudio && haveVideo) {
break;
}
}
}
} else {
// Single-track mode: pick the first audio (with -a) or video track.
sp<MetaData> meta;
size_t i;
for (i = 0; i < numTracks; ++i) {
meta = extractor->getTrackMetaData(
i, MediaExtractor::kIncludeExtensiveMetaData);
const char *mime;
meta->findCString(kKeyMIMEType, &mime);
if (audioOnly && !strncasecmp(mime, "audio/", 6)) {
break;
}
if (!audioOnly && !strncasecmp(mime, "video/", 6)) {
break;
}
meta = NULL; // not a match; keep scanning
}
if (meta == NULL) {
fprintf(stderr,
"No suitable %s track found. The '-a' option will "
"target audio tracks only, the default is to target "
"video tracks only.\n",
audioOnly ? "audio" : "video");
return -1;
}
int64_t thumbTimeUs;
if (meta->findInt64(kKeyThumbnailTime, &thumbTimeUs)) {
printf("thumbnailTime: %lld us (%.2f secs)\n",
thumbTimeUs, thumbTimeUs / 1E6);
}
mediaSource = extractor->getTrack(i);
}
}
// Dispatch on the requested mode: remux, raw dump, decode-to-PCM dump,
// seek test, or normal playback.
if (gWriteMP4) {
writeSourcesToMP4(mediaSources, syncInfoPresent);
} else if (dumpStream) {
dumpSource(mediaSource, dumpStreamFilename);
} else if (dumpPCMStream) {
// Decode through an OMX decoder before dumping.
// NOTE(review): this local OMXClient shadows the outer one and is never
// disconnected before main() returns — looks like a leaked connection.
OMXClient client;
CHECK_EQ(client.connect(), (status_t)OK);
sp<MediaSource> decSource =
OMXCodec::Create(
client.interface(),
mediaSource->getFormat(),
false,
mediaSource,
0,
0);
dumpSource(decSource, dumpStreamFilename);
} else if (seekTest) {
performSeekTest(mediaSource);
} else {
playSource(&client, mediaSource);
}
}
// Tear down the native-window connection used for surface-rendered playback.
if ((useSurfaceAlloc || useSurfaceTexAlloc) && !audioOnly) {
CHECK_EQ((status_t)OK,
native_window_api_disconnect(
gSurface.get(), NATIVE_WINDOW_API_MEDIA));
gSurface.clear();
if (useSurfaceAlloc) {
composerClient->dispose();
}
}
// 'client' here refers to the outer-scope OMXClient declared in the
// omitted beginning of main().
client.disconnect();
return 0;
}
示例5: main
// NOTE(review): truncated excerpt — this fragment starts inside a
// MediaExtractor::Create(...) call; the declarations of dataSource,
// extractor, syncInfoPresent, mediaSources, mediaSource, client,
// rtspController, gWriteMP4, audioOnly, seekTest, etc. are outside this view.
//.........part of the code is omitted here.........
dataSource, MEDIA_MIMETYPE_CONTAINER_MPEG2TS);
syncInfoPresent = false; // forced MPEG2-TS path carries no sync-sample info
} else {
// Let the extractor sniff the container format.
extractor = MediaExtractor::Create(dataSource);
if (extractor == NULL) {
fprintf(stderr, "could not create extractor.\n");
return -1;
}
}
size_t numTracks = extractor->countTracks();
// MP4 remux mode: collect at most one audio and one video track.
if (gWriteMP4) {
bool haveAudio = false;
bool haveVideo = false;
for (size_t i = 0; i < numTracks; ++i) {
sp<MediaSource> source = extractor->getTrack(i);
const char *mime;
CHECK(source->getFormat()->findCString(
kKeyMIMEType, &mime));
bool useTrack = false;
if (!haveAudio && !strncasecmp("audio/", mime, 6)) {
haveAudio = true;
useTrack = true;
} else if (!haveVideo && !strncasecmp("video/", mime, 6)) {
haveVideo = true;
useTrack = true;
}
if (useTrack) {
mediaSources.push(source);
if (haveAudio && haveVideo) {
break;
}
}
}
} else {
// Single-track mode: pick the first audio (with -a) or video track.
sp<MetaData> meta;
size_t i;
for (i = 0; i < numTracks; ++i) {
meta = extractor->getTrackMetaData(
i, MediaExtractor::kIncludeExtensiveMetaData);
const char *mime;
meta->findCString(kKeyMIMEType, &mime);
if (audioOnly && !strncasecmp(mime, "audio/", 6)) {
break;
}
if (!audioOnly && !strncasecmp(mime, "video/", 6)) {
break;
}
meta = NULL; // not a match; keep scanning
}
if (meta == NULL) {
fprintf(stderr,
"No suitable %s track found. The '-a' option will "
"target audio tracks only, the default is to target "
"video tracks only.\n",
audioOnly ? "audio" : "video");
return -1;
}
int64_t thumbTimeUs;
if (meta->findInt64(kKeyThumbnailTime, &thumbTimeUs)) {
printf("thumbnailTime: %lld us (%.2f secs)\n",
thumbTimeUs, thumbTimeUs / 1E6);
}
mediaSource = extractor->getTrack(i);
}
}
// Dispatch: remux to MP4, run the seek test, or play the selected track.
if (gWriteMP4) {
writeSourcesToMP4(mediaSources, syncInfoPresent);
} else if (seekTest) {
performSeekTest(mediaSource);
} else {
playSource(&client, mediaSource);
}
// For RTSP sources, shut down the session; the sleep gives the teardown
// time to complete before process exit.
if (rtspController != NULL) {
rtspController->disconnect();
rtspController.clear();
sleep(3);
}
}
// 'client' is the outer-scope OMXClient declared in the omitted beginning
// of main().
client.disconnect();
return 0;
}