This article collects typical usage examples of the C# System.Drawing.Bitmap.ToBitmapSource method. If you are wondering what System.Drawing.Bitmap.ToBitmapSource does, or how to call it, the selected code examples below may help. You can also read further about the containing class, System.Drawing.Bitmap.
Three code examples of the System.Drawing.Bitmap.ToBitmapSource method are shown below, sorted by popularity by default.
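All three examples call a ToBitmapSource extension method on System.Drawing.Bitmap whose body is not reproduced on this page. As a point of reference, a minimal sketch of such an extension, assuming it goes through the GDI+ HBITMAP interop path and that System and System.Windows.Media.Imaging are imported, could look like this (the actual extension behind these samples may differ):

public static class BitmapExtensions
{
    [System.Runtime.InteropServices.DllImport("gdi32.dll")]
    private static extern bool DeleteObject(IntPtr hObject);

    public static BitmapSource ToBitmapSource(this System.Drawing.Bitmap bitmap)
    {
        // Hypothetical sketch: convert via a native HBITMAP handle.
        IntPtr hBitmap = bitmap.GetHbitmap();
        try
        {
            return System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(
                hBitmap,
                IntPtr.Zero,
                System.Windows.Int32Rect.Empty,
                BitmapSizeOptions.FromEmptyOptions());
        }
        finally
        {
            // Release the native GDI handle so repeated conversions do not leak.
            DeleteObject(hBitmap);
        }
    }
}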
Example 1: ToBitmapSource
/// <summary>
/// Converts a <see cref="System.Drawing.Image"/> into a WPF <see cref="BitmapSource"/>.
/// </summary>
/// <param name="image">The GDI+ image to convert.</param>
/// <returns>A <see cref="BitmapSource"/> containing a copy of the image.</returns>
public static BitmapSource ToBitmapSource(this System.Drawing.Image image)
{
    // Wrap the image in a temporary Bitmap and reuse the Bitmap extension.
    using (var bitmap = new System.Drawing.Bitmap(image))
    {
        return bitmap.ToBitmapSource();
    }
}
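A hypothetical call site for this Image overload (the file name and the WPF Image control imageDisplay are placeholders for illustration):

using (var image = System.Drawing.Image.FromFile("photo.png"))
{
    // Assuming the underlying Bitmap extension copies the pixel data
    // (as in the interop sketch above), disposing the image here is safe.
    imageDisplay.Source = image.ToBitmapSource();
}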
Example 2: sensor_AllFramesReady
void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
{
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame == null)
        {
            return;
        }
        int minDepth = 850;
        int maxDepth = 4000;
        System.Drawing.Bitmap outBmp = new System.Drawing.Bitmap(160, 160);
        BitmapSource depthBitmapSource;
        BitmapSource processedBitmapSource;
        // Get the position of interest on the depth map from skeletal tracking.
        DepthImagePoint rightHandPoint = jointTracker.GetJointPosition(kinectSensorChooser.Kinect, e, JointType.HandRight);
        if (jointTracker.JointDetected)
        {
            textResult.Text = "Right hand is being tracked";
            int rightHandDepth = rightHandPoint.Depth;
            if (rightHandDepth < 850)
            {
                minDepth = 850;
                maxDepth = 1500;
            }
            else
            {
                // Keep a 150 mm slice centred on the tracked hand.
                minDepth = rightHandDepth - 75;
                maxDepth = rightHandDepth + 75;
            }
            depthBitmapSource = sliceDepthImage(depthFrame, minDepth, maxDepth);
            // Create a bitmap from the depth information.
            System.Drawing.Bitmap depthBmp = depthBitmapSource.ToBitmap();
            // AForge.NET performs the image processing here.
            outBmp = imageProcessor.ProcessFrame(depthBmp, rightHandPoint.X, rightHandPoint.Y);
        }
        else
        {
            textResult.Text = "No hand detected";
            //depthBitmapSource = sliceDepthImage(depthFrame, 850, 1500);
            using (System.Drawing.Graphics g = System.Drawing.Graphics.FromImage(outBmp))
            {
                g.Clear(System.Drawing.Color.Black);
            }
        }
        // Create a BitmapSource to show the processed image.
        processedBitmapSource = outBmp.ToBitmapSource();
        // Display the processed image.
        procImageDisplay.Source = processedBitmapSource;
    }
}
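Both Kinect handlers also rely on a ToBitmap extension that converts a BitmapSource back to a System.Drawing.Bitmap before handing it to AForge.NET; that helper is not shown on this page either. One possible sketch, assuming System.Windows.Media.Imaging is imported, encodes the source to an in-memory BMP and reloads it with GDI+:

public static System.Drawing.Bitmap ToBitmap(this BitmapSource source)
{
    using (var stream = new System.IO.MemoryStream())
    {
        // Encode the WPF bitmap as BMP into the stream.
        var encoder = new BmpBitmapEncoder();
        encoder.Frames.Add(BitmapFrame.Create(source));
        encoder.Save(stream);
        stream.Position = 0;
        // Copy into a standalone Bitmap so the MemoryStream can be disposed safely.
        using (var temp = new System.Drawing.Bitmap(stream))
        {
            return new System.Drawing.Bitmap(temp);
        }
    }
}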
Example 3: sensor_AllFramesReady
void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
{
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame == null)
        {
            return;
        }
        // Keep the slider range consistent before reading it.
        if (sliderMinDist.Value > sliderMaxDist.Value)
        {
            sliderMinDist.Value = sliderMaxDist.Value;
        }
        textMinDistVal.Text = sliderMinDist.Value.ToString();
        textMaxDistVal.Text = sliderMaxDist.Value.ToString();
        BitmapSource depthBitmapSource = sliceDepthImage(depthFrame, (int)sliderMinDist.Value, (int)sliderMaxDist.Value);
        // Create a bitmap from the depth information.
        System.Drawing.Bitmap depthBmp = depthBitmapSource.ToBitmap();
        // AForge.NET performs the image processing here.
        System.Drawing.Bitmap outBmp = blobsDetector.ProcessFrame(depthBmp, trackMode);
        textResult.Text = blobsDetector.TotalBlobCount + " blobs detected.";
        // Create a BitmapSource to show the processed image.
        BitmapSource procBitmapSource = outBmp.ToBitmapSource();
        // Display the images.
        depthImageDisplay.Source = depthBitmapSource;
        procImageDisplay.Source = procBitmapSource;
    }
}
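The sliceDepthImage helper that both handlers call is likewise not part of these samples. A hypothetical sketch, assuming Kinect SDK 1.x types and a simple grayscale mask as output, might look like this:

private static BitmapSource sliceDepthImage(DepthImageFrame depthFrame, int minDepth, int maxDepth)
{
    // Copy the raw 16-bit depth data out of the frame.
    short[] rawDepth = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo(rawDepth);

    byte[] pixels = new byte[depthFrame.Width * depthFrame.Height];
    for (int i = 0; i < rawDepth.Length; i++)
    {
        // Drop the player-index bits to get the distance in millimetres,
        // then keep only pixels inside the [minDepth, maxDepth] slice.
        int depth = rawDepth[i] >> DepthImageFrame.PlayerIndexBitmaskWidth;
        pixels[i] = (depth >= minDepth && depth <= maxDepth) ? (byte)255 : (byte)0;
    }

    // 8 bits per pixel, so the stride equals the width.
    return BitmapSource.Create(
        depthFrame.Width, depthFrame.Height,
        96, 96,
        System.Windows.Media.PixelFormats.Gray8,
        null,
        pixels,
        depthFrame.Width);
}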