This article collects typical usage examples of the C# class MediaBrowser.Controller.MediaEncoding.EncodingJobOptions. If you have been wondering what EncodingJobOptions is for and how to use it, the curated class examples below may help.
The EncodingJobOptions class belongs to the MediaBrowser.Controller.MediaEncoding namespace. Fifteen code examples of the class are shown below, ordered by popularity.
Example 1: CreateJob
public async Task<EncodingJob> CreateJob(EncodingJobOptions options, bool isVideoRequest, IProgress<double> progress, CancellationToken cancellationToken)
{
var request = options;
if (string.IsNullOrEmpty(request.AudioCodec))
{
request.AudioCodec = InferAudioCodec(request.OutputContainer);
}
var state = new EncodingJob(_logger, _mediaSourceManager)
{
Options = options,
IsVideoRequest = isVideoRequest,
Progress = progress
};
if (!string.IsNullOrWhiteSpace(request.AudioCodec))
{
state.SupportedAudioCodecs = request.AudioCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
request.AudioCodec = state.SupportedAudioCodecs.FirstOrDefault();
}
var item = _libraryManager.GetItemById(request.ItemId);
state.ItemType = item.GetType().Name;
state.IsInputVideo = string.Equals(item.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);
// Note: "GetPlayackMediaSources" (sic) is the method's actual, misspelled name in this version of IMediaSourceManager.
var mediaSources = await _mediaSourceManager.GetPlayackMediaSources(request.ItemId, null, false, new[] { MediaType.Audio, MediaType.Video }, cancellationToken).ConfigureAwait(false);
var mediaSource = string.IsNullOrEmpty(request.MediaSourceId)
? mediaSources.First()
: mediaSources.First(i => string.Equals(i.Id, request.MediaSourceId));
AttachMediaStreamInfo(state, mediaSource, options);
state.OutputAudioBitrate = GetAudioBitrateParam(request, state.AudioStream);
state.OutputAudioSampleRate = request.AudioSampleRate;
state.OutputAudioCodec = GetAudioCodec(request);
state.OutputAudioChannels = GetNumAudioChannelsParam(request, state.AudioStream, state.OutputAudioCodec);
if (isVideoRequest)
{
state.OutputVideoCodec = GetVideoCodec(request);
state.OutputVideoBitrate = GetVideoBitrateParamValue(request, state.VideoStream);
if (state.OutputVideoBitrate.HasValue)
{
var resolution = ResolutionNormalizer.Normalize(state.OutputVideoBitrate.Value,
state.OutputVideoCodec,
request.MaxWidth,
request.MaxHeight);
request.MaxWidth = resolution.MaxWidth;
request.MaxHeight = resolution.MaxHeight;
}
}
ApplyDeviceProfileSettings(state);
TryStreamCopy(state, request);
return state;
}
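For orientation, a call site for this factory method might look like the following sketch. Only the CreateJob signature and the option property names come from the example above; the factory instance, the item id, and the chosen values are this sketch's own assumptions.
// Hypothetical call site (must run inside an async method); "factory" and the id are assumptions:
var options = new EncodingJobOptions
{
    ItemId = "assumed-item-id",    // assumed: a library item id; real ids come from the library manager
    OutputContainer = "mp4",
    AudioCodec = "aac,mp3",        // a comma-separated list; the example keeps the first supported entry
    MaxWidth = 1280,
    MaxHeight = 720
};
var progress = new Progress<double>(p => Console.WriteLine("{0:0.0}%", p));
EncodingJob job = await factory.CreateJob(options, isVideoRequest: true, progress, CancellationToken.None);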
Example 2: EnforceResolutionLimit
/// <summary>
/// Enforces the resolution limit.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="videoRequest">The video request.</param>
private static void EnforceResolutionLimit(EncodingJob state, EncodingJobOptions videoRequest)
{
// Switch the incoming params to be ceilings rather than fixed values
videoRequest.MaxWidth = videoRequest.MaxWidth ?? videoRequest.Width;
videoRequest.MaxHeight = videoRequest.MaxHeight ?? videoRequest.Height;
videoRequest.Width = null;
videoRequest.Height = null;
}
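The effect is easiest to see with plain nullables. A standalone restatement, with no MediaBrowser types involved:
int? width = 1920, maxWidth = null;   // the caller asked for a fixed 1920-pixel-wide output
maxWidth = maxWidth ?? width;         // the fixed value becomes an upper bound: maxWidth == 1920
width = null;                         // the fixed request is cleared, so anything up to the bound is acceptable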
Example 3: GetNumAudioChannelsParam
/// <summary>
/// Gets the number of audio channels to specify on the command line
/// </summary>
/// <param name="request">The request.</param>
/// <param name="audioStream">The audio stream.</param>
/// <param name="outputAudioCodec">The output audio codec.</param>
/// <returns>System.Nullable{System.Int32}.</returns>
private int? GetNumAudioChannelsParam(EncodingJobOptions request, MediaStream audioStream, string outputAudioCodec)
{
var inputChannels = audioStream == null
? null
: audioStream.Channels;
if (inputChannels <= 0)
{
inputChannels = null;
}
var codec = outputAudioCodec ?? string.Empty;
if (codec.IndexOf("wma", StringComparison.OrdinalIgnoreCase) != -1)
{
// wmav2 currently only supports two-channel output
return Math.Min(2, inputChannels ?? 2);
}
if (request.MaxAudioChannels.HasValue)
{
if (inputChannels.HasValue)
{
return Math.Min(request.MaxAudioChannels.Value, inputChannels.Value);
}
var channelLimit = codec.IndexOf("mp3", StringComparison.OrdinalIgnoreCase) != -1
? 2
: 6;
// If we don't have any media info then fall back to a codec-typical cap (2 for mp3, 6 otherwise) to prevent encoding errors due to asking for too many channels
return Math.Min(request.MaxAudioChannels.Value, channelLimit);
}
return request.AudioChannels;
}
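The branching above is compact enough to restate without any MediaBrowser types. A minimal sketch for sanity-checking the rules (the method name and the bare parameters are this sketch's own):
using System;

static int? LimitAudioChannels(int? inputChannels, int? maxRequested, int? requested, string codec)
{
    if (inputChannels <= 0) inputChannels = null;                    // treat non-positive counts as unknown
    codec = codec ?? string.Empty;
    if (codec.IndexOf("wma", StringComparison.OrdinalIgnoreCase) != -1)
        return Math.Min(2, inputChannels ?? 2);                      // wmav2: stereo at most
    if (maxRequested.HasValue)
    {
        if (inputChannels.HasValue)
            return Math.Min(maxRequested.Value, inputChannels.Value);
        var cap = codec.IndexOf("mp3", StringComparison.OrdinalIgnoreCase) != -1 ? 2 : 6;
        return Math.Min(maxRequested.Value, cap);                    // unknown input: use a codec-typical cap
    }
    return requested;                                                // no ceiling requested: pass through
}

// LimitAudioChannels(6, 8, null, "ac3")           -> 6 (the input channel count is the binding limit)
// LimitAudioChannels(null, 8, null, "libmp3lame") -> 2 (the mp3 cap applies when the input is unknown)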
Example 4: GetVideoBitrateParamValue
private int? GetVideoBitrateParamValue(EncodingJobOptions request, MediaStream videoStream, string outputVideoCodec)
{
var bitrate = request.VideoBitRate;
if (videoStream != null)
{
var isUpscaling = request.Height.HasValue && videoStream.Height.HasValue &&
request.Height.Value > videoStream.Height.Value;
if (request.Width.HasValue && videoStream.Width.HasValue &&
request.Width.Value > videoStream.Width.Value)
{
isUpscaling = true;
}
// Don't allow bitrate increases unless upscaling
if (!isUpscaling)
{
if (bitrate.HasValue && videoStream.BitRate.HasValue)
{
bitrate = Math.Min(bitrate.Value, videoStream.BitRate.Value);
}
}
}
if (bitrate.HasValue)
{
var inputVideoCodec = videoStream == null ? null : videoStream.Codec;
bitrate = ResolutionNormalizer.ScaleBitrate(bitrate.Value, inputVideoCodec, outputVideoCodec);
// If a max bitrate was requested, don't let the scaled bitrate exceed it
if (request.VideoBitRate.HasValue)
{
bitrate = Math.Min(bitrate.Value, request.VideoBitRate.Value);
}
}
return bitrate;
}
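A worked example of the clamp, with illustrative numbers (ScaleBitrate's exact adjustment depends on the codec pair and is not shown here):
int requested = 6_000_000;        // the client asked for 6 Mbps
int sourceBitrate = 4_000_000;    // the source stream carries only 4 Mbps
bool isUpscaling = false;         // the output resolution does not exceed the source
int bitrate = isUpscaling ? requested : Math.Min(requested, sourceBitrate);   // 4 Mbps
// ScaleBitrate may then raise or lower this for codec efficiency, but the final
// Math.Min against request.VideoBitRate keeps the result under the 6 Mbps ceiling.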
Example 5: AttachMediaStreamInfo
internal static void AttachMediaStreamInfo(EncodingJob state,
MediaSourceInfo mediaSource,
EncodingJobOptions videoRequest)
{
state.MediaPath = mediaSource.Path;
state.InputProtocol = mediaSource.Protocol;
state.InputContainer = mediaSource.Container;
state.InputFileSize = mediaSource.Size;
state.InputBitrate = mediaSource.Bitrate;
state.RunTimeTicks = mediaSource.RunTimeTicks;
state.RemoteHttpHeaders = mediaSource.RequiredHttpHeaders;
if (mediaSource.ReadAtNativeFramerate)
{
state.ReadInputAtNativeFramerate = true;
}
if (mediaSource.VideoType.HasValue)
{
state.VideoType = mediaSource.VideoType.Value;
}
state.IsoType = mediaSource.IsoType;
state.PlayableStreamFileNames = mediaSource.PlayableStreamFileNames.ToList();
if (mediaSource.Timestamp.HasValue)
{
state.InputTimestamp = mediaSource.Timestamp.Value;
}
if (state.ReadInputAtNativeFramerate ||
mediaSource.Protocol == MediaProtocol.File && string.Equals(mediaSource.Container, "wtv", StringComparison.OrdinalIgnoreCase))
{
state.OutputAudioSync = "1000";
state.InputVideoSync = "-1";
state.InputAudioSync = "1";
}
var mediaStreams = mediaSource.MediaStreams;
if (videoRequest != null)
{
if (string.IsNullOrEmpty(videoRequest.VideoCodec))
{
videoRequest.VideoCodec = InferVideoCodec(videoRequest.OutputContainer);
}
state.VideoStream = GetMediaStream(mediaStreams, videoRequest.VideoStreamIndex, MediaStreamType.Video);
state.SubtitleStream = GetMediaStream(mediaStreams, videoRequest.SubtitleStreamIndex, MediaStreamType.Subtitle, false);
state.AudioStream = GetMediaStream(mediaStreams, videoRequest.AudioStreamIndex, MediaStreamType.Audio);
if (state.SubtitleStream != null && !state.SubtitleStream.IsExternal)
{
state.InternalSubtitleStreamOffset = mediaStreams.Where(i => i.Type == MediaStreamType.Subtitle && !i.IsExternal).ToList().IndexOf(state.SubtitleStream);
}
if (state.VideoStream != null && state.VideoStream.IsInterlaced)
{
state.DeInterlace = true;
}
EnforceResolutionLimit(state, videoRequest);
}
else
{
state.AudioStream = GetMediaStream(mediaStreams, null, MediaStreamType.Audio, true);
}
state.MediaSource = mediaSource;
}
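The three sync values set above are consumed elsewhere, when the ffmpeg command line is assembled. The flag mapping below is an assumption for illustration, not something this example shows; ffmpeg's -vsync and -async flags themselves are real.
// Assumed mapping to ffmpeg flags (illustrative only; the example sets the values but never uses them):
var videoSyncArg = "-vsync " + state.InputVideoSync;     // "-vsync -1": let ffmpeg auto-select the video sync method
var audioSyncArg = "-async " + state.OutputAudioSync;    // "-async 1000": stretch/squeeze audio to match timestamps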
Example 6: GetAudioCodec
/// <summary>
/// Gets the name of the output audio codec
/// </summary>
/// <param name="request">The request.</param>
/// <returns>System.String.</returns>
private string GetAudioCodec(EncodingJobOptions request)
{
var codec = request.AudioCodec;
if (string.Equals(codec, "aac", StringComparison.OrdinalIgnoreCase))
{
return "aac -strict experimental";
}
if (string.Equals(codec, "mp3", StringComparison.OrdinalIgnoreCase))
{
return "libmp3lame";
}
if (string.Equals(codec, "vorbis", StringComparison.OrdinalIgnoreCase))
{
return "libvorbis";
}
if (string.Equals(codec, "wma", StringComparison.OrdinalIgnoreCase))
{
return "wmav2";
}
return (codec ?? string.Empty).ToLower();
}
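Note that the return value is not always a bare encoder name: for aac it already carries a flag. A hedged sketch of how the string is presumably spliced into the argument list (the -acodec placement is an assumption; the flag itself is standard ffmpeg):
// "-strict experimental" was required by older ffmpeg builds to unlock the native aac encoder.
string codecArg = "-acodec " + GetAudioCodec(request);   // e.g. "-acodec aac -strict experimental"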
Example 7: CanStreamCopyVideo
internal static bool CanStreamCopyVideo(EncodingJobOptions request, MediaStream videoStream)
{
if (videoStream.IsInterlaced)
{
return false;
}
if (videoStream.IsAnamorphic ?? false)
{
return false;
}
// Can't stream copy if we're burning in subtitles
if (request.SubtitleStreamIndex.HasValue)
{
if (request.SubtitleMethod == SubtitleDeliveryMethod.Encode)
{
return false;
}
}
// Source and target codecs must match
if (!string.Equals(request.VideoCodec, videoStream.Codec, StringComparison.OrdinalIgnoreCase))
{
return false;
}
// If client is requesting a specific video profile, it must match the source
if (!string.IsNullOrEmpty(request.Profile))
{
if (string.IsNullOrEmpty(videoStream.Profile))
{
return false;
}
if (!string.Equals(request.Profile, videoStream.Profile, StringComparison.OrdinalIgnoreCase))
{
var currentScore = GetVideoProfileScore(videoStream.Profile);
var requestedScore = GetVideoProfileScore(request.Profile);
if (currentScore == -1 || currentScore > requestedScore)
{
return false;
}
}
}
// Video width must fall within requested value
if (request.MaxWidth.HasValue)
{
if (!videoStream.Width.HasValue || videoStream.Width.Value > request.MaxWidth.Value)
{
return false;
}
}
// Video height must fall within requested value
if (request.MaxHeight.HasValue)
{
if (!videoStream.Height.HasValue || videoStream.Height.Value > request.MaxHeight.Value)
{
return false;
}
}
// Video framerate must fall within requested value
var requestedFramerate = request.MaxFramerate ?? request.Framerate;
if (requestedFramerate.HasValue)
{
var videoFrameRate = videoStream.AverageFrameRate ?? videoStream.RealFrameRate;
if (!videoFrameRate.HasValue || videoFrameRate.Value > requestedFramerate.Value)
{
return false;
}
}
// Video bitrate must fall within requested value
if (request.VideoBitRate.HasValue)
{
if (!videoStream.BitRate.HasValue || videoStream.BitRate.Value > request.VideoBitRate.Value)
{
return false;
}
}
if (request.MaxVideoBitDepth.HasValue)
{
if (videoStream.BitDepth.HasValue && videoStream.BitDepth.Value > request.MaxVideoBitDepth.Value)
{
return false;
}
}
if (request.MaxRefFrames.HasValue)
{
if (videoStream.RefFrames.HasValue && videoStream.RefFrames.Value > request.MaxRefFrames.Value)
{
return false;
}
//......... part of the code omitted here .........
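Even truncated, the method's contract is clear: it answers whether the source video stream already satisfies every constraint in the request. A hedged usage sketch (the "copy" assignment is the conventional ffmpeg stream-copy idiom; the actual caller is not shown in this example):
if (CanStreamCopyVideo(request, state.VideoStream))
{
    state.OutputVideoCodec = "copy";   // remux without a decode/encode cycle
}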
Example 8: GetFastSeekCommandLineParameter
/// <summary>
/// Gets the fast seek command line parameter.
/// </summary>
/// <param name="options">The options.</param>
/// <returns>System.String.</returns>
/// <value>The fast seek command line parameter.</value>
protected string GetFastSeekCommandLineParameter(EncodingJobOptions options)
{
var time = options.StartTimeTicks;
if (time.HasValue && time.Value > 0)
{
return string.Format("-ss {0}", MediaEncoder.GetTimeParameter(time.Value));
}
return string.Empty;
}
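StartTimeTicks is in .NET ticks (100 ns each, so 10,000,000 ticks per second). Assuming MediaEncoder.GetTimeParameter renders ticks as an hh:mm:ss.fff timestamp, a standalone equivalent looks like this:
long startTimeTicks = 900_000_000;                           // 90 seconds into the stream
var ts = TimeSpan.FromTicks(startTimeTicks);
var arg = string.Format("-ss {0:hh\\:mm\\:ss\\.fff}", ts);   // "-ss 00:01:30.000"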
Example 9: AttachMediaStreamInfo
private void AttachMediaStreamInfo(EncodingJob state,
MediaSourceInfo mediaSource,
EncodingJobOptions videoRequest)
{
EncodingJobFactory.AttachMediaStreamInfo(state, mediaSource, videoRequest);
}
Example 10: GetFastSeekCommandLineParameter
/// <summary>
/// Gets the fast seek command line parameter.
/// </summary>
/// <param name="request">The request.</param>
/// <returns>System.String.</returns>
/// <value>The fast seek command line parameter.</value>
protected string GetFastSeekCommandLineParameter(EncodingJobOptions request)
{
var time = request.StartTimeTicks ?? 0;
if (time > 0)
{
return string.Format("-ss {0}", MediaEncoder.GetTimeParameter(time));
}
return string.Empty;
}
Example 11: GetWorkingDirectory
protected virtual string GetWorkingDirectory(EncodingJobOptions options)
{
return null;
}
Example 12: GetNumAudioChannelsParam
/// <summary>
/// Gets the number of audio channels to specify on the command line
/// </summary>
/// <param name="request">The request.</param>
/// <param name="audioStream">The audio stream.</param>
/// <param name="outputAudioCodec">The output audio codec.</param>
/// <returns>System.Nullable{System.Int32}.</returns>
private int? GetNumAudioChannelsParam(EncodingJobOptions request, MediaStream audioStream, string outputAudioCodec)
{
if (audioStream != null)
{
var codec = outputAudioCodec ?? string.Empty;
if (audioStream.Channels > 2 && codec.IndexOf("wma", StringComparison.OrdinalIgnoreCase) != -1)
{
// wmav2 currently only supports two-channel output
return 2;
}
}
if (request.MaxAudioChannels.HasValue)
{
if (audioStream != null && audioStream.Channels.HasValue)
{
return Math.Min(request.MaxAudioChannels.Value, audioStream.Channels.Value);
}
// If we don't have any media info then limit it to 5 to prevent encoding errors due to asking for too many channels
return Math.Min(request.MaxAudioChannels.Value, 5);
}
return request.AudioChannels;
}
Example 13: CreateJob
public async Task<EncodingJob> CreateJob(EncodingJobOptions options, bool isVideoRequest, IProgress<double> progress, CancellationToken cancellationToken)
{
var request = options;
if (string.IsNullOrEmpty(request.AudioCodec))
{
request.AudioCodec = InferAudioCodec(request.OutputContainer);
}
var state = new EncodingJob(_logger, _liveTvManager)
{
Options = options,
IsVideoRequest = isVideoRequest,
Progress = progress
};
if (!string.IsNullOrWhiteSpace(request.AudioCodec))
{
state.SupportedAudioCodecs = request.AudioCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
request.AudioCodec = state.SupportedAudioCodecs.FirstOrDefault();
}
var item = _libraryManager.GetItemById(request.ItemId);
List<MediaStream> mediaStreams = null;
state.ItemType = item.GetType().Name;
if (item is ILiveTvRecording)
{
var recording = await _liveTvManager.GetInternalRecording(request.ItemId, cancellationToken).ConfigureAwait(false);
state.VideoType = VideoType.VideoFile;
state.IsInputVideo = string.Equals(recording.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);
var path = recording.RecordingInfo.Path;
var mediaUrl = recording.RecordingInfo.Url;
var source = string.IsNullOrEmpty(request.MediaSourceId)
? recording.GetMediaSources(false).First()
: recording.GetMediaSources(false).First(i => string.Equals(i.Id, request.MediaSourceId));
mediaStreams = source.MediaStreams;
// Just to prevent this from being null and causing other methods to fail
state.MediaPath = string.Empty;
if (!string.IsNullOrEmpty(path))
{
state.MediaPath = path;
state.InputProtocol = MediaProtocol.File;
}
else if (!string.IsNullOrEmpty(mediaUrl))
{
state.MediaPath = mediaUrl;
state.InputProtocol = MediaProtocol.Http;
}
state.RunTimeTicks = recording.RunTimeTicks;
state.DeInterlace = true;
state.OutputAudioSync = "1000";
state.InputVideoSync = "-1";
state.InputAudioSync = "1";
state.InputContainer = recording.Container;
state.ReadInputAtNativeFramerate = source.ReadAtNativeFramerate;
}
else if (item is LiveTvChannel)
{
var channel = _liveTvManager.GetInternalChannel(request.ItemId);
state.VideoType = VideoType.VideoFile;
state.IsInputVideo = string.Equals(channel.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);
mediaStreams = new List<MediaStream>();
state.DeInterlace = true;
// Just to prevent this from being null and causing other methods to fail
state.MediaPath = string.Empty;
}
else if (item is IChannelMediaItem)
{
var mediaSource = await GetChannelMediaInfo(request.ItemId, request.MediaSourceId, cancellationToken).ConfigureAwait(false);
state.IsInputVideo = string.Equals(item.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);
state.InputProtocol = mediaSource.Protocol;
state.MediaPath = mediaSource.Path;
state.RunTimeTicks = item.RunTimeTicks;
state.RemoteHttpHeaders = mediaSource.RequiredHttpHeaders;
state.InputBitrate = mediaSource.Bitrate;
state.InputFileSize = mediaSource.Size;
state.ReadInputAtNativeFramerate = mediaSource.ReadAtNativeFramerate;
mediaStreams = mediaSource.MediaStreams;
}
else
{
var hasMediaSources = (IHasMediaSources)item;
var mediaSource = string.IsNullOrEmpty(request.MediaSourceId)
? hasMediaSources.GetMediaSources(false).First()
: hasMediaSources.GetMediaSources(false).First(i => string.Equals(i.Id, request.MediaSourceId));
mediaStreams = mediaSource.MediaStreams;
//......... part of the code omitted here .........
Example 14: AttachMediaStreamInfo
internal static void AttachMediaStreamInfo(EncodingJob state,
List<MediaStream> mediaStreams,
EncodingJobOptions videoRequest)
{
if (videoRequest != null)
{
if (string.IsNullOrEmpty(videoRequest.VideoCodec))
{
videoRequest.VideoCodec = InferVideoCodec(videoRequest.OutputContainer);
}
state.VideoStream = GetMediaStream(mediaStreams, videoRequest.VideoStreamIndex, MediaStreamType.Video);
state.SubtitleStream = GetMediaStream(mediaStreams, videoRequest.SubtitleStreamIndex, MediaStreamType.Subtitle, false);
state.AudioStream = GetMediaStream(mediaStreams, videoRequest.AudioStreamIndex, MediaStreamType.Audio);
if (state.SubtitleStream != null && !state.SubtitleStream.IsExternal)
{
state.InternalSubtitleStreamOffset = mediaStreams.Where(i => i.Type == MediaStreamType.Subtitle && !i.IsExternal).ToList().IndexOf(state.SubtitleStream);
}
if (state.VideoStream != null && state.VideoStream.IsInterlaced)
{
state.DeInterlace = true;
}
EnforceResolutionLimit(state, videoRequest);
}
else
{
state.AudioStream = GetMediaStream(mediaStreams, null, MediaStreamType.Audio, true);
}
state.AllMediaStreams = mediaStreams;
}
Example 15: GetVideoBitrateParamValue
private int? GetVideoBitrateParamValue(EncodingJobOptions request, MediaStream videoStream)
{
var bitrate = request.VideoBitRate;
if (videoStream != null)
{
var isUpscaling = request.Height.HasValue && videoStream.Height.HasValue &&
request.Height.Value > videoStream.Height.Value;
if (request.Width.HasValue && videoStream.Width.HasValue &&
request.Width.Value > videoStream.Width.Value)
{
isUpscaling = true;
}
// Don't allow bitrate increases unless upscaling
if (!isUpscaling)
{
if (bitrate.HasValue && videoStream.BitRate.HasValue)
{
bitrate = Math.Min(bitrate.Value, videoStream.BitRate.Value);
}
}
}
return bitrate;
}
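The upscaling exception is the part worth double-checking. With illustrative plain values (no MediaBrowser types):
int? requestWidth = 1920, sourceWidth = 1280;                 // output wider than the source
int? requestBitrate = 8_000_000, sourceBitrate = 3_000_000;
bool isUpscaling = requestWidth > sourceWidth;                // lifted comparison: true
int? bitrate = isUpscaling
    ? requestBitrate                                          // upscaling: the 8 Mbps request stands
    : Math.Min(requestBitrate.Value, sourceBitrate.Value);    // otherwise it would clamp to 3 Mbps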