This article collects typical usage examples of the C# method MediaBrowser.Model.Dto.MediaSourceInfo.GetStreamCount. If you have been wondering what MediaSourceInfo.GetStreamCount does, how to call it, or where it is used in practice, the hand-picked code examples below may help. You can also explore further usage examples of the containing class, MediaBrowser.Model.Dto.MediaSourceInfo.
Two code examples of the MediaSourceInfo.GetStreamCount method are shown below, sorted by popularity by default.
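Neither example shows GetStreamCount itself, only its call sites, so as orientation here is a minimal sketch of what the method is assumed to do, judging from its signature (it takes a MediaStreamType and returns an int?) and from how the examples below use it. The class name StreamCountSketch and the null-when-no-streams behavior are assumptions for illustration, not the library's actual implementation.
// Minimal sketch, not the library source: what MediaSourceInfo.GetStreamCount
// is assumed to do, based on its signature and the call sites in the examples below.
using System.Collections.Generic;
using System.Linq;
using MediaBrowser.Model.Dto;
using MediaBrowser.Model.Entities;

public static class StreamCountSketch
{
    public static int? GetStreamCount(MediaSourceInfo mediaSource, MediaStreamType type)
    {
        List<MediaStream> streams = mediaSource.MediaStreams ?? new List<MediaStream>();

        // Assumption: an empty stream list means "unknown" rather than zero,
        // which would explain the nullable return type.
        if (streams.Count == 0)
        {
            return null;
        }

        // Count only the streams of the requested type (audio, video, subtitle, ...).
        return streams.Count(s => s.Type == type);
    }
}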
Example 1: GetVideoDirectPlayProfile
private PlayMethod? GetVideoDirectPlayProfile(DeviceProfile profile,
    MediaSourceInfo mediaSource,
    MediaStream videoStream,
    MediaStream audioStream,
    bool isEligibleForDirectPlay,
    bool isEligibleForDirectStream)
{
    // See if it can be direct played
    DirectPlayProfile directPlay = null;
    foreach (DirectPlayProfile i in profile.DirectPlayProfiles)
    {
        if (i.Type == DlnaProfileType.Video && IsVideoDirectPlaySupported(i, mediaSource, videoStream, audioStream))
        {
            directPlay = i;
            break;
        }
    }

    if (directPlay == null)
    {
        _logger.Debug("Profile: {0}, No direct play profiles found for Path: {1}",
            profile.Name ?? "Unknown Profile",
            mediaSource.Path ?? "Unknown path");

        return null;
    }

    string container = mediaSource.Container;

    List<ProfileCondition> conditions = new List<ProfileCondition>();
    foreach (ContainerProfile i in profile.ContainerProfiles)
    {
        if (i.Type == DlnaProfileType.Video &&
            ListHelper.ContainsIgnoreCase(i.GetContainers(), container))
        {
            foreach (ProfileCondition c in i.Conditions)
            {
                conditions.Add(c);
            }
        }
    }

    ConditionProcessor conditionProcessor = new ConditionProcessor();

    int? width = videoStream == null ? null : videoStream.Width;
    int? height = videoStream == null ? null : videoStream.Height;
    int? bitDepth = videoStream == null ? null : videoStream.BitDepth;
    int? videoBitrate = videoStream == null ? null : videoStream.BitRate;
    double? videoLevel = videoStream == null ? null : videoStream.Level;
    string videoProfile = videoStream == null ? null : videoStream.Profile;
    float? videoFramerate = videoStream == null ? null : videoStream.AverageFrameRate ?? videoStream.RealFrameRate; // fall back to the real frame rate when the average is unavailable
    bool? isAnamorphic = videoStream == null ? null : videoStream.IsAnamorphic;
    bool? isCabac = videoStream == null ? null : videoStream.IsCabac;
    int? audioBitrate = audioStream == null ? null : audioStream.BitRate;
    int? audioChannels = audioStream == null ? null : audioStream.Channels;
    string audioProfile = audioStream == null ? null : audioStream.Profile;
    TransportStreamTimestamp? timestamp = videoStream == null ? TransportStreamTimestamp.None : mediaSource.Timestamp;
    int? packetLength = videoStream == null ? null : videoStream.PacketLength;
    int? refFrames = videoStream == null ? null : videoStream.RefFrames;

    int? numAudioStreams = mediaSource.GetStreamCount(MediaStreamType.Audio);
    int? numVideoStreams = mediaSource.GetStreamCount(MediaStreamType.Video);

    // Check container conditions
    foreach (ProfileCondition i in conditions)
    {
        if (!conditionProcessor.IsVideoConditionSatisfied(i, width, height, bitDepth, videoBitrate, videoProfile, videoLevel, videoFramerate, packetLength, timestamp, isAnamorphic, isCabac, refFrames, numVideoStreams, numAudioStreams))
        {
            LogConditionFailure(profile, "VideoContainerProfile", i, mediaSource);
            return null;
        }
    }

    string videoCodec = videoStream == null ? null : videoStream.Codec;

    if (string.IsNullOrEmpty(videoCodec))
    {
        _logger.Debug("Profile: {0}, DirectPlay=false. Reason=Unknown video codec. Path: {1}",
            profile.Name ?? "Unknown Profile",
            mediaSource.Path ?? "Unknown path");

        return null;
    }

    conditions = new List<ProfileCondition>();
    foreach (CodecProfile i in profile.CodecProfiles)
    {
        if (i.Type == CodecType.Video && i.ContainsCodec(videoCodec, container))
        {
            foreach (ProfileCondition c in i.Conditions)
            {
                conditions.Add(c);
            }
        }
    }

    foreach (ProfileCondition i in conditions)
    // ... the rest of this code is omitted ...
Example 2: BuildVideoItem
                    // ... the beginning of this code is omitted ...
                    bool? isSecondaryAudio = audioStream == null ? null : item.IsSecondaryAudio(audioStream);
                    int? inputAudioBitrate = audioStream == null ? null : audioStream.BitRate;
                    int? audioChannels = audioStream == null ? null : audioStream.Channels;
                    string audioProfile = audioStream == null ? null : audioStream.Profile;

                    if (!conditionProcessor.IsVideoAudioConditionSatisfied(applyCondition, audioChannels, inputAudioBitrate, audioProfile, isSecondaryAudio))
                    {
                        LogConditionFailure(options.Profile, "AudioCodecProfile", applyCondition, item);
                        applyConditions = false;
                        break;
                    }
                }

                if (applyConditions)
                {
                    foreach (ProfileCondition c in i.Conditions)
                    {
                        videoTranscodingConditions.Add(c);
                    }
                    break;
                }
            }
        }
        ApplyTranscodingConditions(playlistItem, videoTranscodingConditions);

        List<ProfileCondition> audioTranscodingConditions = new List<ProfileCondition>();
        foreach (CodecProfile i in options.Profile.CodecProfiles)
        {
            if (i.Type == CodecType.VideoAudio && i.ContainsCodec(playlistItem.TargetAudioCodec, transcodingProfile.Container))
            {
                bool applyConditions = true;
                foreach (ProfileCondition applyCondition in i.ApplyConditions)
                {
                    int? width = videoStream == null ? null : videoStream.Width;
                    int? height = videoStream == null ? null : videoStream.Height;
                    int? bitDepth = videoStream == null ? null : videoStream.BitDepth;
                    int? videoBitrate = videoStream == null ? null : videoStream.BitRate;
                    double? videoLevel = videoStream == null ? null : videoStream.Level;
                    string videoProfile = videoStream == null ? null : videoStream.Profile;
                    float? videoFramerate = videoStream == null ? null : videoStream.AverageFrameRate ?? videoStream.RealFrameRate; // fall back to the real frame rate when the average is unavailable
                    bool? isAnamorphic = videoStream == null ? null : videoStream.IsAnamorphic;
                    string videoCodecTag = videoStream == null ? null : videoStream.CodecTag;
                    TransportStreamTimestamp? timestamp = videoStream == null ? TransportStreamTimestamp.None : item.Timestamp;
                    int? packetLength = videoStream == null ? null : videoStream.PacketLength;
                    int? refFrames = videoStream == null ? null : videoStream.RefFrames;

                    int? numAudioStreams = item.GetStreamCount(MediaStreamType.Audio);
                    int? numVideoStreams = item.GetStreamCount(MediaStreamType.Video);

                    if (!conditionProcessor.IsVideoConditionSatisfied(applyCondition, width, height, bitDepth, videoBitrate, videoProfile, videoLevel, videoFramerate, packetLength, timestamp, isAnamorphic, refFrames, numVideoStreams, numAudioStreams, videoCodecTag))
                    {
                        LogConditionFailure(options.Profile, "VideoCodecProfile", applyCondition, item);
                        applyConditions = false;
                        break;
                    }
                }

                if (applyConditions)
                {
                    foreach (ProfileCondition c in i.Conditions)
                    {
                        audioTranscodingConditions.Add(c);
                    }
                    break;
                }
            }
        }
        ApplyTranscodingConditions(playlistItem, audioTranscodingConditions);

        // Honor requested max channels
        if (options.MaxAudioChannels.HasValue)
        {
            int currentValue = playlistItem.MaxAudioChannels ?? options.MaxAudioChannels.Value;
            playlistItem.MaxAudioChannels = Math.Min(options.MaxAudioChannels.Value, currentValue);
        }

        int audioBitrate = GetAudioBitrate(playlistItem.SubProtocol, options.GetMaxBitrate(false), playlistItem.TargetAudioChannels, playlistItem.TargetAudioCodec, audioStream);
        playlistItem.AudioBitrate = Math.Min(playlistItem.AudioBitrate ?? audioBitrate, audioBitrate);

        int? maxBitrateSetting = options.GetMaxBitrate(false);
        // Honor max rate
        if (maxBitrateSetting.HasValue)
        {
            int videoBitrate = maxBitrateSetting.Value;

            if (playlistItem.AudioBitrate.HasValue)
            {
                videoBitrate -= playlistItem.AudioBitrate.Value;
            }

            // Make sure the video bitrate is lower than bitrate settings but at least 64k
            int currentValue = playlistItem.VideoBitrate ?? videoBitrate;
            playlistItem.VideoBitrate = Math.Max(Math.Min(videoBitrate, currentValue), 64000);
        }
    }

    return playlistItem;
}
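To close, here is a small, hypothetical usage example (it is not part of the examples above): it builds a MediaSourceInfo by hand and queries it with GetStreamCount, mirroring how both examples obtain numAudioStreams and numVideoStreams before evaluating profile conditions. The file name, container, and codec values are made up for illustration.
using System;
using System.Collections.Generic;
using MediaBrowser.Model.Dto;
using MediaBrowser.Model.Entities;

public static class GetStreamCountDemo
{
    public static void Main()
    {
        // Hypothetical media source with one video stream and two audio streams.
        MediaSourceInfo mediaSource = new MediaSourceInfo
        {
            Path = "example.mkv",
            Container = "mkv",
            MediaStreams = new List<MediaStream>
            {
                new MediaStream { Type = MediaStreamType.Video, Index = 0, Codec = "h264" },
                new MediaStream { Type = MediaStreamType.Audio, Index = 1, Codec = "aac" },
                new MediaStream { Type = MediaStreamType.Audio, Index = 2, Codec = "ac3" }
            }
        };

        int? numVideoStreams = mediaSource.GetStreamCount(MediaStreamType.Video);
        int? numAudioStreams = mediaSource.GetStreamCount(MediaStreamType.Audio);

        // Expected output: Video streams: 1, Audio streams: 2
        Console.WriteLine("Video streams: {0}, Audio streams: {1}", numVideoStreams, numAudioStreams);
    }
}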