This article collects typical usage examples of the C# class MediaBrowser.Api.Playback.StreamState. If you are unsure what StreamState is for or how it is used, the selected examples below should help.
The StreamState class belongs to the MediaBrowser.Api.Playback namespace. Fifteen code examples of the class are shown below, ordered by popularity.
Example 1: AcquireResources
private async Task AcquireResources(StreamState state, CancellationTokenSource cancellationTokenSource)
{
if (state.VideoType == VideoType.Iso && state.IsoType.HasValue && IsoManager.CanMount(state.MediaPath))
{
state.IsoMount = await IsoManager.Mount(state.MediaPath, cancellationTokenSource.Token).ConfigureAwait(false);
}
if (state.MediaSource.RequiresOpening && string.IsNullOrWhiteSpace(state.Request.LiveStreamId))
{
var liveStreamResponse = await MediaSourceManager.OpenLiveStream(new LiveStreamRequest
{
OpenToken = state.MediaSource.OpenToken
}, false, cancellationTokenSource.Token).ConfigureAwait(false);
AttachMediaSourceInfo(state, liveStreamResponse.MediaSource, state.VideoRequest, state.RequestedUrl);
if (state.VideoRequest != null)
{
TryStreamCopy(state, state.VideoRequest);
}
}
if (state.MediaSource.BufferMs.HasValue)
{
await Task.Delay(state.MediaSource.BufferMs.Value, cancellationTokenSource.Token).ConfigureAwait(false);
}
}
Example 2: StartFfMpeg
/// <summary>
/// Starts the FFMPEG.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="outputPath">The output path.</param>
/// <param name="cancellationTokenSource">The cancellation token source.</param>
/// <returns>Task.</returns>
/// <exception cref="System.InvalidOperationException">ffmpeg was not found at + MediaEncoder.EncoderPath</exception>
protected async Task StartFfMpeg(StreamState state, string outputPath, CancellationTokenSource cancellationTokenSource)
{
if (!File.Exists(MediaEncoder.EncoderPath))
{
throw new InvalidOperationException("ffmpeg was not found at " + MediaEncoder.EncoderPath);
}
Directory.CreateDirectory(Path.GetDirectoryName(outputPath));
await AcquireResources(state, cancellationTokenSource).ConfigureAwait(false);
var commandLineArgs = GetCommandLineArguments(outputPath, state, true);
if (ServerConfigurationManager.Configuration.EnableDebugEncodingLogging)
{
commandLineArgs = "-loglevel debug " + commandLineArgs;
}
var process = new Process
{
StartInfo = new ProcessStartInfo
{
CreateNoWindow = true,
UseShellExecute = false,
// Must consume both stdout and stderr or deadlocks may occur
RedirectStandardOutput = true,
RedirectStandardError = true,
RedirectStandardInput = true,
FileName = MediaEncoder.EncoderPath,
WorkingDirectory = Path.GetDirectoryName(MediaEncoder.EncoderPath),
Arguments = commandLineArgs,
WindowStyle = ProcessWindowStyle.Hidden,
ErrorDialog = false
},
EnableRaisingEvents = true
};
ApiEntryPoint.Instance.OnTranscodeBeginning(outputPath,
TranscodingJobType,
process,
state.Request.DeviceId,
state,
cancellationTokenSource);
var commandLineLogMessage = process.StartInfo.FileName + " " + process.StartInfo.Arguments;
Logger.Info(commandLineLogMessage);
var logFilePath = Path.Combine(ServerConfigurationManager.ApplicationPaths.LogDirectoryPath, "transcode-" + Guid.NewGuid() + ".txt");
Directory.CreateDirectory(Path.GetDirectoryName(logFilePath));
// FFMpeg writes debug/error info to stderr. This is useful when debugging so let's put it in the log directory.
state.LogFileStream = FileSystem.GetFileStream(logFilePath, FileMode.Create, FileAccess.Write, FileShare.Read, true);
var commandLineLogMessageBytes = Encoding.UTF8.GetBytes(commandLineLogMessage + Environment.NewLine + Environment.NewLine);
await state.LogFileStream.WriteAsync(commandLineLogMessageBytes, 0, commandLineLogMessageBytes.Length, cancellationTokenSource.Token).ConfigureAwait(false);
process.Exited += (sender, args) => OnFfMpegProcessExited(process, state, outputPath);
try
{
process.Start();
}
catch (Exception ex)
{
Logger.ErrorException("Error starting ffmpeg", ex);
ApiEntryPoint.Instance.OnTranscodeFailedToStart(outputPath, TranscodingJobType, state);
throw;
}
// MUST read both stdout and stderr asynchronously or a deadlock may occur
process.BeginOutputReadLine();
// Important - don't await the log task or we won't be able to kill ffmpeg when the user stops playback
StartStreamingLog(state, process.StandardError.BaseStream, state.LogFileStream);
// Wait for the file to exist before proceeding
while (!File.Exists(outputPath))
{
await Task.Delay(100, cancellationTokenSource.Token).ConfigureAwait(false);
}
}
Example 3: GetCommandLineArguments
/// <summary>
/// Gets the command line arguments.
/// </summary>
/// <param name="outputPath">The output path.</param>
/// <param name="state">The state.</param>
/// <param name="isEncoding">if set to <c>true</c> [is encoding].</param>
/// <returns>System.String.</returns>
protected abstract string GetCommandLineArguments(string outputPath, StreamState state, bool isEncoding);
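As a rough, hedged illustration (not taken from the Emby source), a derived streaming service might implement this abstract member by stitching together the helpers shown in the other examples on this page; the codec choice and argument layout here are assumptions:
// Hypothetical override in a derived service (a sketch, not the actual implementation).
// It combines GetInputArgument and GetVideoQualityParam, both shown elsewhere on this page.
protected override string GetCommandLineArguments(string outputPath, StreamState state, bool isEncoding)
{
    var inputArgument = GetInputArgument(state);
    var qualityParam = GetVideoQualityParam(state, "libx264", isHls: false);
    return string.Format("-i {0} -map 0 -codec:v libx264 {1} -y \"{2}\"",
        inputArgument,
        qualityParam.Trim(),
        outputPath);
}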
Example 4: GetGraphicalSubtitleParam
/// <summary>
/// Gets the internal graphical subtitle param.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="outputVideoCodec">The output video codec.</param>
/// <returns>System.String.</returns>
protected string GetGraphicalSubtitleParam(StreamState state, string outputVideoCodec)
{
var outputSizeParam = string.Empty;
var request = state.VideoRequest;
// Add resolution params, if specified
if (request.Width.HasValue || request.Height.HasValue || request.MaxHeight.HasValue || request.MaxWidth.HasValue)
{
outputSizeParam = GetOutputSizeParam(state, outputVideoCodec, CancellationToken.None).TrimEnd('"');
outputSizeParam = "," + outputSizeParam.Substring(outputSizeParam.IndexOf("scale", StringComparison.OrdinalIgnoreCase));
}
var videoSizeParam = string.Empty;
if (state.VideoStream != null && state.VideoStream.Width.HasValue && state.VideoStream.Height.HasValue)
{
videoSizeParam = string.Format(",scale={0}:{1}", state.VideoStream.Width.Value.ToString(UsCulture), state.VideoStream.Height.Value.ToString(UsCulture));
}
return string.Format(" -filter_complex \"[0:{0}]format=yuva444p{3},lut=u=128:v=128:y=gammaval(.3)[sub] ; [0:{1}] [sub] overlay{2}\"",
state.SubtitleStream.Index,
state.VideoStream.Index,
outputSizeParam,
videoSizeParam);
}
Example 5: GetInputArgument
/// <summary>
/// Gets the input argument.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected virtual string GetInputArgument(StreamState state)
{
var protocol = state.InputProtocol;
var inputPath = new[] { state.MediaPath };
if (state.IsInputVideo)
{
if (!(state.VideoType == VideoType.Iso && state.IsoMount == null))
{
inputPath = MediaEncoderHelpers.GetInputArgument(state.MediaPath, state.InputProtocol, state.IsoMount, state.PlayableStreamFileNames);
}
}
return MediaEncoder.GetInputArgument(inputPath, protocol);
}
Example 6: GetVideoQualityParam
/// <summary>
/// Gets the video bitrate to specify on the command line
/// </summary>
/// <param name="state">The state.</param>
/// <param name="videoCodec">The video codec.</param>
/// <param name="isHls">if set to <c>true</c> [is HLS].</param>
/// <returns>System.String.</returns>
protected string GetVideoQualityParam(StreamState state, string videoCodec, bool isHls)
{
var param = string.Empty;
var isVc1 = state.VideoStream != null &&
string.Equals(state.VideoStream.Codec, "vc1", StringComparison.OrdinalIgnoreCase);
var qualitySetting = GetQualitySetting();
if (string.Equals(videoCodec, "libx264", StringComparison.OrdinalIgnoreCase))
{
switch (qualitySetting)
{
case EncodingQuality.HighSpeed:
param = "-preset superfast";
break;
case EncodingQuality.HighQuality:
param = "-preset superfast";
break;
case EncodingQuality.MaxQuality:
param = "-preset superfast";
break;
}
switch (qualitySetting)
{
case EncodingQuality.HighSpeed:
param += " -crf 23";
break;
case EncodingQuality.HighQuality:
param += " -crf 20";
break;
case EncodingQuality.MaxQuality:
param += " -crf 18";
break;
}
}
// webm
else if (string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase))
{
// Values 0-3, 0 being highest quality but slower
var profileScore = 0;
string crf;
switch (qualitySetting)
{
case EncodingQuality.HighSpeed:
crf = "12";
profileScore = 2;
break;
case EncodingQuality.HighQuality:
crf = "8";
profileScore = 1;
break;
case EncodingQuality.MaxQuality:
crf = "4";
break;
default:
throw new ArgumentException("Unrecognized quality setting");
}
if (isVc1)
{
profileScore++;
// Max of 2
profileScore = Math.Min(profileScore, 2);
}
// http://www.webmproject.org/docs/encoder-parameters/
param = string.Format("-speed 16 -quality good -profile:v {0} -slices 8 -crf {1}",
profileScore.ToString(UsCulture),
crf);
}
else if (string.Equals(videoCodec, "mpeg4", StringComparison.OrdinalIgnoreCase))
{
param = "-mbd rd -flags +mv4+aic -trellis 2 -cmp 2 -subcmp 2 -bf 2";
}
// asf/wmv
else if (string.Equals(videoCodec, "wmv2", StringComparison.OrdinalIgnoreCase))
{
param = "-qmin 2";
}
else if (string.Equals(videoCodec, "msmpeg4", StringComparison.OrdinalIgnoreCase))
{
param = "-mbd 2";
}
param += GetVideoBitrateParam(state, videoCodec, isHls);
//......... remaining code omitted .........
Example 7: GetOutputSizeParam
/// <summary>
/// If we're going to put a fixed size on the command line, this will calculate it
/// </summary>
/// <param name="state">The state.</param>
/// <param name="outputVideoCodec">The output video codec.</param>
/// <param name="allowTimeStampCopy">if set to <c>true</c> [allow time stamp copy].</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>System.String.</returns>
protected string GetOutputSizeParam(StreamState state,
string outputVideoCodec,
CancellationToken cancellationToken,
bool allowTimeStampCopy = true)
{
// http://sonnati.wordpress.com/2012/10/19/ffmpeg-the-swiss-army-knife-of-internet-streaming-part-vi/
var request = state.VideoRequest;
var filters = new List<string>();
if (state.DeInterlace)
{
filters.Add("yadif=0:-1:0");
}
// If fixed dimensions were supplied
if (request.Width.HasValue && request.Height.HasValue)
{
var widthParam = request.Width.Value.ToString(UsCulture);
var heightParam = request.Height.Value.ToString(UsCulture);
filters.Add(string.Format("scale=trunc({0}/2)*2:trunc({1}/2)*2", widthParam, heightParam));
}
// If max dimensions were supplied: the width becomes the lowest even number between the input width and the requested max width, and the height becomes the lowest even number between (input width / display aspect ratio) and the requested max height
else if (request.MaxWidth.HasValue && request.MaxHeight.HasValue)
{
var maxWidthParam = request.MaxWidth.Value.ToString(UsCulture);
var maxHeightParam = request.MaxHeight.Value.ToString(UsCulture);
filters.Add(string.Format("scale=trunc(min(iw\\,{0})/2)*2:trunc(min((iw/dar)\\,{1})/2)*2", maxWidthParam, maxHeightParam));
}
// If a fixed width was requested
else if (request.Width.HasValue)
{
var widthParam = request.Width.Value.ToString(UsCulture);
filters.Add(string.Format("scale={0}:trunc(ow/a/2)*2", widthParam));
}
// If a fixed height was requested
else if (request.Height.HasValue)
{
var heightParam = request.Height.Value.ToString(UsCulture);
filters.Add(string.Format("scale=trunc(oh*a*2)/2:{0}", heightParam));
}
// If a max width was requested
else if (request.MaxWidth.HasValue && (!request.MaxHeight.HasValue || state.VideoStream == null))
{
var maxWidthParam = request.MaxWidth.Value.ToString(UsCulture);
filters.Add(string.Format("scale=min(iw\\,{0}):trunc(ow/dar/2)*2", maxWidthParam));
}
// If a max height was requested
else if (request.MaxHeight.HasValue && (!request.MaxWidth.HasValue || state.VideoStream == null))
{
var maxHeightParam = request.MaxHeight.Value.ToString(UsCulture);
filters.Add(string.Format("scale=trunc(oh*a*2)/2:min(ih\\,{0})", maxHeightParam));
}
else if (request.MaxWidth.HasValue ||
request.MaxHeight.HasValue ||
request.Width.HasValue ||
request.Height.HasValue)
{
if (state.VideoStream != null)
{
// Need to perform calculations manually
// Try to account for bad media info
var currentHeight = state.VideoStream.Height ?? request.MaxHeight ?? request.Height ?? 0;
var currentWidth = state.VideoStream.Width ?? request.MaxWidth ?? request.Width ?? 0;
var outputSize = DrawingUtils.Resize(currentWidth, currentHeight, request.Width, request.Height, request.MaxWidth, request.MaxHeight);
var manualWidthParam = outputSize.Width.ToString(UsCulture);
var manualHeightParam = outputSize.Height.ToString(UsCulture);
filters.Add(string.Format("scale=trunc({0}/2)*2:trunc({1}/2)*2", manualWidthParam, manualHeightParam));
}
}
var output = string.Empty;
if (state.SubtitleStream != null && state.SubtitleStream.IsTextSubtitleStream)
{
//......... remaining code omitted .........
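The trunc(.../2)*2 expressions in these scale filters force both output dimensions to even numbers, which yuv420p-based encoders such as libx264 require. A minimal C# sketch of the same rounding (an illustration only, not part of StreamState or the service code):
// Sketch only: round a dimension down to the nearest even number,
// mirroring the trunc(value/2)*2 expressions used in the scale filters above.
private static int RoundDownToEven(int value)
{
    return 2 * (value / 2); // integer division truncates, e.g. 1279 -> 1278
}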
Example 8: GetFramerateParam
protected double? GetFramerateParam(StreamState state)
{
if (state.VideoRequest != null)
{
if (state.VideoRequest.Framerate.HasValue)
{
return state.VideoRequest.Framerate.Value;
}
var maxrate = state.VideoRequest.MaxFramerate ?? 23.97602;
if (state.VideoStream != null)
{
var contentRate = state.VideoStream.AverageFrameRate ?? state.VideoStream.RealFrameRate;
if (contentRate.HasValue && contentRate.Value > maxrate)
{
return maxrate;
}
}
}
return null;
}
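The caller is expected to turn this nullable value into an ffmpeg -r output option; a minimal sketch of that call site, assuming a simple string append (the variable names are illustrative):
// Sketch of applying the framerate (an assumption about the call site, not the actual code).
// UsCulture is the same culture field used for number formatting throughout these examples.
var args = string.Empty;
var framerate = GetFramerateParam(state);
if (framerate.HasValue)
{
    args += " -r " + framerate.Value.ToString(UsCulture);
}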
Example 9: GetOutputFilePath
/// <summary>
/// Gets the output file path.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
private string GetOutputFilePath(StreamState state)
{
var folder = ServerConfigurationManager.ApplicationPaths.TranscodingTempPath;
var outputFileExtension = GetOutputFileExtension(state);
var data = GetCommandLineArguments("dummy\\dummy", state, false);
data += "-" + (state.Request.DeviceId ?? string.Empty);
return Path.Combine(folder, data.GetMD5().ToString("N") + (outputFileExtension ?? string.Empty).ToLower());
}
Example 10: GetUserAgentParam
/// <summary>
/// Gets the user agent param.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
private string GetUserAgentParam(StreamState state)
{
string useragent = null;
state.RemoteHttpHeaders.TryGetValue("User-Agent", out useragent);
if (!string.IsNullOrWhiteSpace(useragent))
{
return "-user-agent \"" + useragent + "\"";
}
return string.Empty;
}
Example 11: OnFfMpegProcessExited
/// <summary>
/// Processes the exited.
/// </summary>
/// <param name="process">The process.</param>
/// <param name="state">The state.</param>
/// <param name="outputPath">The output path.</param>
private void OnFfMpegProcessExited(Process process, StreamState state, string outputPath)
{
var job = ApiEntryPoint.Instance.GetTranscodingJob(outputPath, TranscodingJobType);
if (job != null)
{
job.HasExited = true;
}
Logger.Debug("Disposing stream resources");
state.Dispose();
try
{
Logger.Info("FFMpeg exited with code {0}", process.ExitCode);
}
catch
{
Logger.Error("FFMpeg exited with an error.");
}
// Disposing the process here causes the Exited handler to be called twice:
//try
//{
// // Dispose the process
// process.Dispose();
//}
//catch (Exception ex)
//{
// Logger.ErrorException("Error disposing ffmpeg.", ex);
//}
}
Example 12: GetOutputFileExtension
/// <summary>
/// Gets the output file extension.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected virtual string GetOutputFileExtension(StreamState state)
{
return Path.GetExtension(state.RequestedUrl);
}
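Derived services can override this to pin a specific container instead of deriving it from the requested URL; for instance, an HLS-style service would plausibly return the playlist extension (a hedged sketch, not the actual Emby override):
// Hypothetical override for an HLS-style service; the real implementation may differ.
protected override string GetOutputFileExtension(StreamState state)
{
    return ".m3u8";
}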
Example 13: GetVideoBitrateParam
protected string GetVideoBitrateParam(StreamState state, string videoCodec, bool isHls)
{
var bitrate = state.OutputVideoBitrate;
if (bitrate.HasValue)
{
var hasFixedResolution = state.VideoRequest.HasFixedResolution;
if (string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase))
{
if (hasFixedResolution)
{
return string.Format(" -minrate:v ({0}*.90) -maxrate:v ({0}*1.10) -bufsize:v {0} -b:v {0}", bitrate.Value.ToString(UsCulture));
}
// With vpx, when crf is used, -b:v becomes a max rate (https://trac.ffmpeg.org/wiki/vpxEncodingGuide).
// Higher-bitrate source files can cause judder with -b:v, so limit the bitrate without letting it saturate: constrain it upward only, not downward.
return string.Format(" -maxrate:v {0} -bufsize:v ({0}*2) -b:v {0}", bitrate.Value.ToString(UsCulture));
}
if (string.Equals(videoCodec, "msmpeg4", StringComparison.OrdinalIgnoreCase))
{
return string.Format(" -b:v {0}", bitrate.Value.ToString(UsCulture));
}
// H264
if (hasFixedResolution)
{
if (isHls)
{
return string.Format(" -b:v {0} -maxrate ({0}*.80) -bufsize {0}", bitrate.Value.ToString(UsCulture));
}
return string.Format(" -b:v {0}", bitrate.Value.ToString(UsCulture));
}
return string.Format(" -maxrate {0} -bufsize {1}",
bitrate.Value.ToString(UsCulture),
(bitrate.Value * 2).ToString(UsCulture));
}
return string.Empty;
}
Example 14: StartFfMpeg
/// <summary>
/// Starts the FFMPEG.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="outputPath">The output path.</param>
/// <param name="cancellationTokenSource">The cancellation token source.</param>
/// <param name="workingDirectory">The working directory.</param>
/// <returns>Task.</returns>
protected async Task<TranscodingJob> StartFfMpeg(StreamState state,
string outputPath,
CancellationTokenSource cancellationTokenSource,
string workingDirectory = null)
{
FileSystem.CreateDirectory(Path.GetDirectoryName(outputPath));
await AcquireResources(state, cancellationTokenSource).ConfigureAwait(false);
var transcodingId = Guid.NewGuid().ToString("N");
var commandLineArgs = GetCommandLineArguments(outputPath, state, true);
if (ApiEntryPoint.Instance.GetEncodingOptions().EnableDebugLogging)
{
commandLineArgs = "-loglevel debug " + commandLineArgs;
}
var process = new Process
{
StartInfo = new ProcessStartInfo
{
CreateNoWindow = true,
UseShellExecute = false,
// Must consume both stdout and stderr or deadlocks may occur
RedirectStandardOutput = true,
RedirectStandardError = true,
RedirectStandardInput = true,
FileName = MediaEncoder.EncoderPath,
Arguments = commandLineArgs,
WindowStyle = ProcessWindowStyle.Hidden,
ErrorDialog = false
},
EnableRaisingEvents = true
};
if (!string.IsNullOrWhiteSpace(workingDirectory))
{
process.StartInfo.WorkingDirectory = workingDirectory;
}
var transcodingJob = ApiEntryPoint.Instance.OnTranscodeBeginning(outputPath,
state.Request.PlaySessionId,
state.MediaSource.LiveStreamId,
transcodingId,
TranscodingJobType,
process,
state.Request.DeviceId,
state,
cancellationTokenSource);
var commandLineLogMessage = process.StartInfo.FileName + " " + process.StartInfo.Arguments;
Logger.Info(commandLineLogMessage);
var logFilePath = Path.Combine(ServerConfigurationManager.ApplicationPaths.LogDirectoryPath, "transcode-" + Guid.NewGuid() + ".txt");
FileSystem.CreateDirectory(Path.GetDirectoryName(logFilePath));
// FFMpeg writes debug/error info to stderr. This is useful when debugging so let's put it in the log directory.
state.LogFileStream = FileSystem.GetFileStream(logFilePath, FileMode.Create, FileAccess.Write, FileShare.Read, true);
var commandLineLogMessageBytes = Encoding.UTF8.GetBytes(Request.AbsoluteUri + Environment.NewLine + Environment.NewLine + JsonSerializer.SerializeToString(state.MediaSource) + Environment.NewLine + Environment.NewLine + commandLineLogMessage + Environment.NewLine + Environment.NewLine);
await state.LogFileStream.WriteAsync(commandLineLogMessageBytes, 0, commandLineLogMessageBytes.Length, cancellationTokenSource.Token).ConfigureAwait(false);
process.Exited += (sender, args) => OnFfMpegProcessExited(process, transcodingJob, state);
try
{
process.Start();
}
catch (Exception ex)
{
Logger.ErrorException("Error starting ffmpeg", ex);
ApiEntryPoint.Instance.OnTranscodeFailedToStart(outputPath, TranscodingJobType, state);
throw;
}
// MUST read both stdout and stderr asynchronously or a deadlock may occur
process.BeginOutputReadLine();
// Important - don't await the log task or we won't be able to kill ffmpeg when the user stops playback
StartStreamingLog(transcodingJob, state, process.StandardError.BaseStream, state.LogFileStream);
// Wait for the file to exist before proceeding
while (!FileSystem.FileExists(state.WaitForPath ?? outputPath) && !transcodingJob.HasExited)
{
await Task.Delay(100, cancellationTokenSource.Token).ConfigureAwait(false);
}
//......... remaining code omitted .........
Example 15: GetInputModifier
protected string GetInputModifier(StreamState state, bool genPts = true)
{
var inputModifier = string.Empty;
var probeSize = GetProbeSizeArgument(state);
inputModifier += " " + probeSize;
inputModifier = inputModifier.Trim();
var userAgentParam = GetUserAgentParam(state);
if (!string.IsNullOrWhiteSpace(userAgentParam))
{
inputModifier += " " + userAgentParam;
}
inputModifier = inputModifier.Trim();
inputModifier += " " + GetFastSeekCommandLineParameter(state.Request);
inputModifier = inputModifier.Trim();
if (state.VideoRequest != null && genPts)
{
inputModifier += " -fflags +genpts";
}
if (!string.IsNullOrEmpty(state.InputAudioSync))
{
inputModifier += " -async " + state.InputAudioSync;
}
if (!string.IsNullOrEmpty(state.InputVideoSync))
{
inputModifier += " -vsync " + state.InputVideoSync;
}
if (state.ReadInputAtNativeFramerate)
{
inputModifier += " -re";
}
return inputModifier;
}
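To tie the input-side helpers together: a caller would typically prepend the modifier returned here to the -i argument produced by GetInputArgument before appending the output options. A hedged sketch of that composition (the call-site shape is an assumption, not the actual Emby code):
// Sketch of how the input modifier composes with the rest of the command line.
var outputPath = GetOutputFilePath(state);     // see Example 9
var inputModifier = GetInputModifier(state);   // probe size, user agent, fast seek, -re, etc.
var inputArgument = GetInputArgument(state);   // path or URL formatted for the input protocol
var commandLine = string.Format("{0} -i {1} {2} -y \"{3}\"",
    inputModifier,
    inputArgument,
    GetVideoQualityParam(state, "libx264", isHls: false).Trim(),
    outputPath);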