This article collects typical usage examples of the C# ImageFrameReadyEventArgs class: what the class is for and how it is used in practice. The namespace the class belongs to was not captured in the original listing. Fifteen code examples are shown below, ordered by popularity by default.
Example 1: nui_ColorFrameReady
void nui_ColorFrameReady(object sender, ImageFrameReadyEventArgs e)
{
    // 32 bits per pixel, BGR32 image (blue, green, red, one unused byte)
    PlanarImage Image = e.ImageFrame.Image;
    video.Source = BitmapSource.Create(
        Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null,
        Image.Bits, Image.Width * Image.BytesPerPixel);
}
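Handlers like this assume a Runtime from the beta-era Microsoft.Research.Kinect.Nui API has already been initialized and its video stream opened. A minimal hook-up sketch (the field name nui, the method name StartColorStream, the resolution, and the buffer-pool size are assumptions, not part of the example) might look like this:

// Sketch only: wiring the handler to a beta-SDK Runtime (assumed API usage and names).
Runtime nui;

void StartColorStream()
{
    nui = new Runtime();
    nui.Initialize(RuntimeOptions.UseColor);
    nui.VideoFrameReady += nui_ColorFrameReady;
    nui.VideoStream.Open(ImageStreamType.Video, 2,
        ImageResolution.Resolution640x480, ImageType.Color);
}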
Example 2: DepthFrameReady
private void DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
{
    PlanarImage p = e.ImageFrame.Image;
    Color[] DepthColor = new Color[p.Height * p.Width];
    float maxDist = 4000;
    float minDist = 850;
    float distOffset = maxDist - minDist;
    depthImg = new Texture2D(GraphicsDevice, p.Width, p.Height);
    for (int y = 0; y < p.Height; y++)
    {
        for (int x = 0; x < p.Width; x++)
        {
            // Each depth sample is two bytes, little-endian.
            int n = (y * p.Width + x) * 2;
            int distance = (p.Bits[n + 0] | p.Bits[n + 1] << 8);
            if (y == 100)
                Console.Write(distance + ", ");   // debug trace of one scanline
            // Map [minDist, maxDist] to a gray level; nearer objects are brighter.
            byte intensity = (byte)(255 - (255 * Math.Max(distance - minDist, 0) / distOffset));
            DepthColor[y * p.Width + x] = new Color(intensity, intensity, intensity);
        }
    }
    depthImg.SetData(DepthColor);
}
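The texture built here is presumably drawn elsewhere in the XNA game loop. Assuming the handler lives in a Game subclass with a SpriteBatch field named spriteBatch (both assumptions), the drawing side might look roughly like this:

// Sketch only: rendering the depth texture; "spriteBatch" is an assumed field created in LoadContent.
protected override void Draw(GameTime gameTime)
{
    GraphicsDevice.Clear(Color.Black);
    if (depthImg != null)
    {
        spriteBatch.Begin();
        spriteBatch.Draw(depthImg, Vector2.Zero, Color.White);
        spriteBatch.End();
    }
    base.Draw(gameTime);
}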
Example 3: nui_VideoFrameReady
//****************************//
void nui_VideoFrameReady(object sender, ImageFrameReadyEventArgs evt)
{
    PlanarImage imgKinect = evt.ImageFrame.Image;
    imageRGB.Source = BitmapSource.Create(imgKinect.Width, imgKinect.Height, 96, 96, PixelFormats.Bgr32,
        null, imgKinect.Bits, imgKinect.Width * imgKinect.BytesPerPixel);
}
Example 4: nui_DepthFrameReady
/// <summary>Fires when a depth frame is ready.</summary>
protected void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
{
    // Convert the depth frame to a video frame so it can be rendered
    PlanarImage Image = e.ImageFrame.Image;
    byte[] convertedDepthFrame = util.convertDepthFrame(Image.Bits, ref depthFrame32);
    image2.Source = BitmapSource.Create(Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, convertedDepthFrame, Image.Width * 4);

    // Clear extraneous canvas elements (bones)
    if (canvas1.Children.Count > 9 && handler.JointHistory[(int)JointID.HandLeft].Count > 0 && handler.JointHistory[(int)JointID.HandRight].Count > 0)
        canvas1.Children.RemoveRange(9, canvas1.Children.Count - 9);

    // Draw hand tracking lines and circles (currently disabled)
    /*if (handler.JointHistory[(int)JointID.HandLeft].Count > 0)
        DrawCircle(handler.JointHistory[(int)JointID.HandLeft].Last());
    if (handler.JointHistory[(int)JointID.HandRight].Count > 0)
        DrawCircle(handler.JointHistory[(int)JointID.HandRight].Last());
    for (int i = 0; i < handler.JointHistory[(int)JointID.HandLeft].Count - 1; i++)
        DrawLine(handler.JointHistory[(int)JointID.HandLeft][i], handler.JointHistory[(int)JointID.HandLeft][i + 1]);
    for (int i = 0; i < handler.JointHistory[(int)JointID.HandRight].Count - 1; i++)
        DrawLine(handler.JointHistory[(int)JointID.HandRight][i], handler.JointHistory[(int)JointID.HandRight][i + 1]);*/

    // Calculate FPS once per second
    ++totalFrames;
    if (lastTime < DateTime.Now.AddSeconds(-1))
    {
        int frameDiff = totalFrames - lastFrames;
        lastFrames = totalFrames;
        lastTime = DateTime.Now;
        Title = "KinectNUI - " + frameDiff.ToString() + " FPS";
    }
}
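util.convertDepthFrame is not shown in this example. A hypothetical sketch of such a converter, assuming the depth stream carries a player index in the low three bits of each 16-bit sample and that depthFrame32 is pre-allocated to width * height * 4 bytes (both assumptions, not the project's actual helper):

// Hypothetical sketch; not the original helper.
public byte[] convertDepthFrame(byte[] depthFrame16, ref byte[] depthFrame32)
{
    for (int i16 = 0, i32 = 0; i16 + 1 < depthFrame16.Length && i32 + 3 < depthFrame32.Length; i16 += 2, i32 += 4)
    {
        // Player index sits in bits 0-2, depth (in millimetres) in bits 3-15.
        int realDepth = (depthFrame16[i16 + 1] << 5) | (depthFrame16[i16] >> 3);
        int clamped = Math.Min(realDepth, 0x0FFF);
        byte intensity = (byte)(255 - (255 * clamped / 0x0FFF));   // nearer = brighter
        depthFrame32[i32 + 0] = intensity;   // blue
        depthFrame32[i32 + 1] = intensity;   // green
        depthFrame32[i32 + 2] = intensity;   // red
        depthFrame32[i32 + 3] = 0;           // unused byte in Bgr32
    }
    return depthFrame32;
}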
Example 5: nui_VideoFrameReady
public void nui_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
{
    // Dump the video stream to the Image element
    PlanarImage Image = e.ImageFrame.Image;
    image.Source = BitmapSource.Create(Image.Width, Image.Height, 96, 96,
        PixelFormats.Bgr32, null, Image.Bits, Image.Width * Image.BytesPerPixel);
}
Example 6: Update
public void Update(ImageFrameReadyEventArgs e)
{
    // Allocate the 32-bit output buffer on the first frame.
    if (depthFrame32 == null)
    {
        depthFrame32 = new byte[e.ImageFrame.Image.Width * e.ImageFrame.Image.Height * 4];
    }
    ConvertDepthFrame(e.ImageFrame.Image.Bits);

    // Create the bitmap once, then rewrite its pixels every frame.
    if (DepthBitmap == null)
    {
        DepthBitmap = new WriteableBitmap(e.ImageFrame.Image.Width, e.ImageFrame.Image.Height, 96, 96, PixelFormats.Bgra32, null);
    }
    DepthBitmap.Lock();
    int stride = DepthBitmap.PixelWidth * DepthBitmap.Format.BitsPerPixel / 8;
    Int32Rect dirtyRect = new Int32Rect(0, 0, DepthBitmap.PixelWidth, DepthBitmap.PixelHeight);
    DepthBitmap.WritePixels(dirtyRect, depthFrame32, stride, 0);
    DepthBitmap.AddDirtyRect(dirtyRect);
    DepthBitmap.Unlock();
    RaisePropertyChanged(() => DepthBitmap);
}
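Design note: the WritePixels overload used above already marks the written region as dirty, so the explicit Lock / AddDirtyRect / Unlock bracket is not strictly required. An equivalent, slightly shorter update would be:

// Equivalent update; this WritePixels overload handles locking and the dirty rect itself.
int stride = DepthBitmap.PixelWidth * DepthBitmap.Format.BitsPerPixel / 8;
DepthBitmap.WritePixels(
    new Int32Rect(0, 0, DepthBitmap.PixelWidth, DepthBitmap.PixelHeight),
    depthFrame32, stride, 0);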
Example 7: kinect_DepthFrameReady
void kinect_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
{
    var source = e.ImageFrame.Image;
    image1.Source = BitmapSource.Create(source.Width, source.Height, 96, 96,
        PixelFormats.Gray16, null, ConvertGrayScale(source).Bits,
        source.Width * source.BytesPerPixel);
}
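ConvertGrayScale is not included in the example. One plausible sketch, assuming PlanarImage exposes a writable Bits field and the stream carries plain 13-bit depth with no player index (both assumptions):

// Hypothetical sketch; not the original project's helper.
private PlanarImage ConvertGrayScale(PlanarImage source)
{
    byte[] scaled = new byte[source.Bits.Length];
    for (int i = 0; i + 1 < source.Bits.Length; i += 2)
    {
        // Reassemble the little-endian depth value and stretch the 13-bit
        // range so it becomes visible in a 16-bit Gray16 image.
        ushort depth = (ushort)(source.Bits[i] | (source.Bits[i + 1] << 8));
        ushort gray = (ushort)(depth << 3);
        scaled[i] = (byte)(gray & 0xFF);
        scaled[i + 1] = (byte)(gray >> 8);
    }
    source.Bits = scaled;   // PlanarImage is a struct, so only this local copy is modified
    return source;
}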
Example 8: runtime_VideoFrameReady
void runtime_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
{
    // Forward the runtime's event to this class's own VideoFrameReady subscribers.
    if (this.VideoFrameReady != null)
    {
        this.VideoFrameReady(this, e);
    }
}
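This forwarder only makes sense alongside the event declaration and subscription in the wrapping class; a rough sketch of that plumbing (the field name runtime and the method name HookRuntime are assumptions) is:

// Sketch of the surrounding plumbing assumed by the forwarder above.
public event EventHandler<ImageFrameReadyEventArgs> VideoFrameReady;

private Runtime runtime;

private void HookRuntime()
{
    runtime = new Runtime();
    runtime.VideoFrameReady += runtime_VideoFrameReady;
}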
Example 9: runtime_DepthFrameReady
void runtime_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
{
    // Render the raw 16-bit depth data directly as a grayscale image.
    PlanarImage image = e.ImageFrame.Image;
    BitmapSource source = BitmapSource.Create(image.Width, image.Height, 96, 96,
        PixelFormats.Gray16, null, image.Bits, image.Width * image.BytesPerPixel);
    depthImage.Source = source;
}
Example 10: Update
public BitmapSource Update(ImageFrameReadyEventArgs e)
{
    PlanarImage Image = e.ImageFrame.Image;
    ColorBitmap = BitmapSource.Create(Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, Image.Bits, Image.Width * Image.BytesPerPixel);
    RaisePropertyChanged(() => ColorBitmap);
    return ColorBitmap;
}
Example 11: RuntimeColorFrameReady
void RuntimeColorFrameReady(object sender, ImageFrameReadyEventArgs e)
{
    ColorImage.Source = e.ImageFrame.ToBitmapSource();
    if (_saveColorFrame)
    {
        _saveColorFrame = false;
        e.ImageFrame.ToBitmapSource().Save(DateTime.Now.ToString("yyyyMMddHHmmss") + "_color.jpg", ImageFormat.Jpeg);
    }
}
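ToBitmapSource() and Save(...) appear to be toolkit-style extension methods (in the style of the Coding4Fun Kinect toolkit), and the conversion runs twice per saved frame. A hedged alternative that caches the conversion once and saves through WPF's own JpegBitmapEncoder (requires System.IO and System.Windows.Media.Imaging) could look roughly like this:

// Sketch only: cache the converted frame and save it with a plain WPF JPEG encoder.
BitmapSource source = e.ImageFrame.ToBitmapSource();
ColorImage.Source = source;
if (_saveColorFrame)
{
    _saveColorFrame = false;
    var encoder = new JpegBitmapEncoder();
    encoder.Frames.Add(BitmapFrame.Create(source));
    using (var stream = File.Create(DateTime.Now.ToString("yyyyMMddHHmmss") + "_color.jpg"))
    {
        encoder.Save(stream);
    }
}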
Example 12: kinect_VideoFrameReady
void kinect_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
{
    // A Kinect that has just been unplugged reports an InstanceIndex of -1.
    Runtime kinect = sender as Runtime;
    if ((kinect != null) && (kinect.InstanceIndex >= 0))
    {
        PlanarImage source = e.ImageFrame.Image;
        Image dest = images[kinect.InstanceIndex];
        dest.Source = BitmapSource.Create(source.Width, source.Height, 96, 96,
            PixelFormats.Bgr32, null, source.Bits, source.Width * source.BytesPerPixel);
    }
}
Example 13: nui_DepthFrameReady
void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
{
    // Convert the depth information for each pixel into color information
    byte[] ColoredBytes = GenerateColoredBytes(e.ImageFrame);

    // Create an image based on the returned colors
    PlanarImage image = e.ImageFrame.Image;
    image1.Source = BitmapSource.Create(image.Width, image.Height, 96, 96, PixelFormats.Bgr32, null,
        ColoredBytes, image.Width * PixelFormats.Bgr32.BitsPerPixel / 8);
}
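GenerateColoredBytes is not shown in this example. One plausible sketch of a distance-banded colorizer (the thresholds, the colors, and the assumption that the stream carries plain depth without a player index are all illustrative, not taken from the original project):

// Hypothetical sketch; not the original GenerateColoredBytes implementation.
private byte[] GenerateColoredBytes(ImageFrame imageFrame)
{
    PlanarImage depth = imageFrame.Image;
    byte[] colored = new byte[depth.Width * depth.Height * 4];   // Bgr32 output

    for (int i16 = 0, i32 = 0; i16 + 1 < depth.Bits.Length; i16 += 2, i32 += 4)
    {
        // Assumes plain depth data (no player index), measured in millimetres.
        int distance = depth.Bits[i16] | (depth.Bits[i16 + 1] << 8);

        if (distance <= 900)              // very close: blue
        {
            colored[i32 + 0] = 255;
        }
        else if (distance <= 2000)        // mid range: green
        {
            colored[i32 + 1] = 255;
        }
        else                              // far: red
        {
            colored[i32 + 2] = 255;
        }
        // colored[i32 + 3] stays 0 (unused byte in Bgr32).
    }
    return colored;
}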
Example 14: nui_VideoFrameReady
/// <summary>
/// Event handler for when a frame from the RGB camera stream is ready.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void nui_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
{
    try
    {
        PlanarImage image = e.ImageFrame.Image;
        // Note: 6 dpi is unusually low; most of the other examples pass 96.
        rgbImage.Source = BitmapSource.Create(image.Width, image.Height, 6, 6,
            PixelFormats.Bgr32, null, image.Bits, image.Width * image.BytesPerPixel);
    }
    catch (Exception ex)
    {
        rgbImage = null;
        MessageBox.Show(ex.Message);
    }
}
Example 15: ColorImageReady
void ColorImageReady(object sender, ImageFrameReadyEventArgs e)
{
    PlanarImage planarImage = e.ImageFrame.Image;
    // An InteropBitmap is a WPF construct that allows the image's bits to be reset in place.
    // This is more efficient than calling BitmapSource.Create on every frame.
    if (imageHelper == null)
    {
        imageHelper = new InteropBitmapHelper(planarImage.Width, planarImage.Height, planarImage.Bits);
        kinectColorImage.Source = imageHelper.InteropBitmap;
    }
    else
    {
        imageHelper.UpdateBits(planarImage.Bits);
    }
}
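InteropBitmapHelper is a helper class that is not reproduced in this example; it wraps a WPF InteropBitmap over a reusable pixel buffer. As a stand-in that follows the same "create once, update bits every frame" pattern but deliberately uses WriteableBitmap instead of InteropBitmap (a different technique, sketched here only under that assumption):

// Stand-in sketch based on WriteableBitmap rather than InteropBitmap.
// Requires System.Windows, System.Windows.Media and System.Windows.Media.Imaging.
public class WriteableBitmapHelper
{
    private readonly WriteableBitmap bitmap;
    private readonly int stride;

    public WriteableBitmapHelper(int width, int height, byte[] initialBits)
    {
        bitmap = new WriteableBitmap(width, height, 96, 96, PixelFormats.Bgr32, null);
        stride = width * 4;
        UpdateBits(initialBits);
    }

    public BitmapSource Bitmap { get { return bitmap; } }

    public void UpdateBits(byte[] bits)
    {
        // Overwrite the whole bitmap with the latest frame's bytes.
        bitmap.WritePixels(
            new Int32Rect(0, 0, bitmap.PixelWidth, bitmap.PixelHeight),
            bits, stride, 0);
    }
}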