This article collects typical usage examples of the KinectSensor.Open method from the Microsoft.Kinect namespace in C#. If you have been wondering what C# KinectSensor.Open does, how to call it, or what real code that uses it looks like, the hand-picked examples below may help. You can also explore further usage examples of the containing class, Microsoft.Kinect.KinectSensor.
A total of 15 code examples of the KinectSensor.Open method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code examples.
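Before the examples, here is a minimal sketch of the lifecycle they all share: get the default sensor, call Open, watch availability, and close the sensor on shutdown. It is an illustration only; the class and handler names are not taken from the examples on this page.

using System;
using Microsoft.Kinect;

// Minimal sketch of the usual KinectSensor.Open lifecycle (illustrative names only).
class SensorLifecycleSketch
{
    private KinectSensor _sensor;

    public void Start()
    {
        _sensor = KinectSensor.GetDefault();

        // Open() returns immediately; whether a device is actually connected
        // is reported later through the IsAvailableChanged event.
        _sensor.IsAvailableChanged += Sensor_IsAvailableChanged;
        _sensor.Open();
    }

    private void Sensor_IsAvailableChanged(object sender, IsAvailableChangedEventArgs e)
    {
        Console.WriteLine(e.IsAvailable ? "Kinect sensor is available" : "Kinect sensor is not available");
    }

    public void Stop()
    {
        // Release the sensor when the application shuts down.
        if (_sensor != null && _sensor.IsOpen)
        {
            _sensor.Close();
        }
    }
}

Most of the examples below fold this pattern into a window constructor or a Window_Loaded handler and open frame readers (color, depth, infrared, body, face) around the call to Open.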
Example 1: Window_Loaded
// Primary function. Runs when the window loads in.
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        _sensor.Open();

        _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

        _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
        _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

        _bodyReader = _sensor.BodyFrameSource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // 2) Initialize the face source with the desired features
        _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace |
                                                      FaceFrameFeatures.FaceEngagement |
                                                      FaceFrameFeatures.Glasses |
                                                      FaceFrameFeatures.Happy |
                                                      FaceFrameFeatures.LeftEyeClosed |
                                                      FaceFrameFeatures.MouthOpen |
                                                      FaceFrameFeatures.PointsInColorSpace |
                                                      FaceFrameFeatures.RightEyeClosed);

        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;
    }
}
Example 2: Window_Loaded
private void Window_Loaded( object sender, RoutedEventArgs e )
{
    try {
        // Open the Kinect
        kinect = KinectSensor.GetDefault();
        kinect.Open();

        // Get the frame description of the infrared image
        infraredFrameDesc = kinect.InfraredFrameSource.FrameDescription;

        // Open the infrared reader
        infraredFrameReader = kinect.InfraredFrameSource.OpenReader();
        infraredFrameReader.FrameArrived += infraredFrameReader_FrameArrived;

        // Create what is needed for the display bitmap
        infraredBuffer = new ushort[infraredFrameDesc.LengthInPixels];
        infraredBitmap = new WriteableBitmap(
            infraredFrameDesc.Width, infraredFrameDesc.Height,
            96, 96, PixelFormats.Gray16, null );
        infraredRect = new Int32Rect( 0, 0,
            infraredFrameDesc.Width, infraredFrameDesc.Height );
        infraredStride = infraredFrameDesc.Width *
            (int)infraredFrameDesc.BytesPerPixel;

        ImageInfrared.Source = infraredBitmap;
    }
    catch ( Exception ex ) {
        MessageBox.Show( ex.Message );
        Close();
    }
}
Example 3: RunAsync
// Run the application asynchronously
static async Task RunAsync()
{
    // Get the default Kinect sensor
    _kinectSensor = KinectSensor.GetDefault();

    // Open the reader for the body frames
    _bodyFrameReader = _kinectSensor.BodyFrameSource.OpenReader();

    // Set the coordinate mapper
    _coordinateMapper = _kinectSensor.CoordinateMapper;

    // Open the sensor
    _kinectSensor.Open();

    // Check whether the sensor is available
    Console.WriteLine("Kinect sensor is " + (_kinectSensor.IsAvailable ? "available " : "missing. Waiting for sensor: press ctrl + c to abort"));
    while (!_kinectSensor.IsAvailable)
    {
        // wait for sensor
    }
    Console.WriteLine("Kinect sensor is " + (_kinectSensor.IsAvailable ? "available " : "missing. Waiting for sensor: press ctrl + c to abort"));

    // Init gesture
    _handOverHeadDetector = new HandOverHeadDetector(HandDetectionType.BothHands, HandState.Open);

    // Subscribe to completed event
    _handOverHeadDetector.GestureCompleteEvent += HandOverHeadDetectorOnGestureCompleteEvent;

    // Start receiving Kinect frames
    if (_bodyFrameReader != null)
    {
        _bodyFrameReader.FrameArrived += Reader_FrameArrived;
    }
}
Example 4: KinectHandler
public KinectHandler()
{
    instance = this;

    kinectSensor = KinectSensor.GetDefault();
    kinectSensor.CoordinateMapper.CoordinateMappingChanged += CoordinateMapper_CoordinateMappingChanged;
    kinectSensor.Open();
}
Example 5: Window_Loaded
private void Window_Loaded( object sender, RoutedEventArgs e )
{
    try {
        kinect = KinectSensor.GetDefault();
        if ( kinect == null ) {
            throw new Exception( "Cannot open the Kinect" );
        }
        kinect.Open();

        // Create the data used for display
        depthFrameDesc = kinect.DepthFrameSource.FrameDescription;

        // Create what is needed for the display bitmap
        depthImage = new WriteableBitmap( depthFrameDesc.Width, depthFrameDesc.Height,
                                          96, 96, PixelFormats.Gray16, null );
        depthBuffer = new ushort[depthFrameDesc.LengthInPixels];
        depthRect = new Int32Rect( 0, 0, depthFrameDesc.Width, depthFrameDesc.Height );
        depthStride = (int)(depthFrameDesc.Width * depthFrameDesc.BytesPerPixel);
        ImageDepth.Source = depthImage;

        // Initial coordinates for the position marker
        depthPoint = new Point( depthFrameDesc.Width / 2, depthFrameDesc.Height / 2 );

        // Open the depth reader
        depthFrameReader = kinect.DepthFrameSource.OpenReader();
        depthFrameReader.FrameArrived += depthFrameReader_FrameArrived;
    }
    catch ( Exception ex ) {
        MessageBox.Show( ex.Message );
        Close();
    }
}
Example 6: MainWindow
public MainWindow()
{
    _kinectSensor = KinectSensor.GetDefault();

    _depthFrameDescription = _kinectSensor.DepthFrameSource.FrameDescription;
    _depthFrameReader = _kinectSensor.DepthFrameSource.OpenReader();
    _depthFrameReader.FrameArrived += Reader_FrameArrived;

    _cameraSpacePoints = new CameraSpacePoint[_depthFrameDescription.Width * _depthFrameDescription.Height];

    _trackingDiagnostics = new TrackingDiagnostics();
    _heatMap = new HeatMap();
    _energyHistory = new EnergyHistory();
    _temporalMedianImage = new TemporalMedianImage(GlobVar.TemporalFrameCounter);
    _stopwatch = new Stopwatch();

    BodiesHistory.Initialize();
    GlobVar.CoordinateMapper = _kinectSensor.CoordinateMapper;
    GlobVar.TimeStamps = new List<TimeSpan>();

    // initialize the components (controls) of the GUI window
    InitializeComponent();

    _kinectSensor.Open();
}
Example 7: MainWindow
public MainWindow()
{
    InitializeComponent();
    network.init();

    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        _sensor.Open();

        // Identify the bodies
        _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

        _colorReader = _sensor.ColorFrameSource.OpenReader();
        _colorReader.FrameArrived += ColorReader_FrameArrived;

        _bodyReader = _sensor.BodyFrameSource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // Initialize the face source with the desired features, some are commented out, include later.
        _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace);// |
        /*
            FaceFrameFeatures.FaceEngagement |
            FaceFrameFeatures.Glasses |
            FaceFrameFeatures.Happy |
            FaceFrameFeatures.LeftEyeClosed |
            FaceFrameFeatures.MouthOpen |
            FaceFrameFeatures.PointsInColorSpace |
            FaceFrameFeatures.RightEyeClosed);
        */
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;
    }
}
Example 8: MainWindow
public MainWindow()
{
    InitializeComponent();

    var hubConnection = new HubConnection("http://divewakeweb.azurewebsites.net/");
    stockTickerHubProxy = hubConnection.CreateHubProxy("WakeHub");
    hubConnection.Start().Wait();

    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        _sensor.Open();

        _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

        _colorReader = _sensor.ColorFrameSource.OpenReader();
        _colorReader.FrameArrived += ColorReader_FrameArrived;

        _bodyReader = _sensor.BodyFrameSource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // 2) Initialize the face source with the desired features
        _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace |
                                                      FaceFrameFeatures.FaceEngagement |
                                                      FaceFrameFeatures.Glasses |
                                                      FaceFrameFeatures.LeftEyeClosed |
                                                      FaceFrameFeatures.PointsInColorSpace |
                                                      FaceFrameFeatures.RightEyeClosed);

        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;
    }
}
Example 9: MainWindow
public MainWindow()
{
    // Get the sensor
    sensor = KinectSensor.GetDefault();
    sensor.Open();

    // Setup readers for each source of data we want to use
    colorFrameReader = sensor.ColorFrameSource.OpenReader();
    bodyFrameReader = sensor.BodyFrameSource.OpenReader();

    // Setup event handlers that use what we get from the readers
    colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;
    bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // Get ready to draw graphics
    drawingGroup = new DrawingGroup();

    // Initialize the components (controls) of the window
    InitializeComponent();

    // Initialize color components
    // create the bitmap to display
    colorBitmap = new WriteableBitmap(1920, 1080, 96.0, 96.0, PixelFormats.Bgr32, null);
    ColorImage.Source = colorBitmap;

    // Initialize the game components
    birdHeight = this.Height / 2; // put the bird in the middle of the screen
    prevRightHandHeight = 0;
    prevLeftHandHeight = 0;
    pipeX = -1;
    pipeGapY = 250;
    pipeGapLength = 170;
    randomGenerator = new Random();
}
Example 10: KinectStreamer
private KinectStreamer()
{
    KinectStreamerConfig = new KinectStreamerConfig();

    kinectSensor = KinectSensor.GetDefault();
    CoordinateMapper = kinectSensor.CoordinateMapper;

    multiSourceFrameReader = kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth | FrameSourceTypes.Color | FrameSourceTypes.Body);
    multiSourceFrameReader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

    ColorFrameDescription = kinectSensor.ColorFrameSource.FrameDescription;
    DepthFrameDescription = kinectSensor.DepthFrameSource.FrameDescription;

    depthBitmap = new WriteableBitmap(DepthFrameDescription.Width, DepthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray8, null);
    colorBitmap = new WriteableBitmap(ColorFrameDescription.Width, ColorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
    bitmapBackBufferSize = (uint)((colorBitmap.BackBufferStride * (colorBitmap.PixelHeight - 1)) + (colorBitmap.PixelWidth * this.bytesPerPixel));

    colorPixels = new byte[ColorFrameDescription.Width * ColorFrameDescription.Height];
    depthPixels = new byte[DepthFrameDescription.Width * DepthFrameDescription.Height];
    depthArray = new ushort[DepthFrameDescription.Width * DepthFrameDescription.Height];

    SetupBody();

    kinectSensor.Open();
}
Example 11: KinectController
public KinectController()
{
    kinectSensor = KinectSensor.GetDefault();

    // open the reader for the body frames
    bodyReader = kinectSensor.BodyFrameSource.OpenReader();
    kinectSensor.Open();

    Arm = ArmPointing.Nothing;
    hasPointed = false;
    lastAveragePositionLeft = 0f;
    lastAveragePositionRight = 0f;
    frameCounterLeft = 0;
    frameCounterRight = 0;

    if (!File.Exists(OPT_FILE))
    {
        offsetX = 0;
        offsetY = 0;
    }
    else
    {
        string data = File.ReadAllText(OPT_FILE);
        List<float> offset = JsonConvert.DeserializeObject<List<float>>(data);
        offsetX = offset[0];
        offsetY = offset[1];
    }
}
Example 12: MainWindow
/// <summary>
/// The main window of the app.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        _depthReader = _sensor.DepthFrameSource.OpenReader();
        _depthReader.FrameArrived += DepthReader_FrameArrived;

        _infraredReader = _sensor.InfraredFrameSource.OpenReader();
        _infraredReader.FrameArrived += InfraredReader_FrameArrived;

        _bodyReader = _sensor.BodyFrameSource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

        // Initialize the HandsController and subscribe to the HandsDetected event.
        _handsController = new HandsController();
        _handsController.HandsDetected += HandsController_HandsDetected;

        _sensor.Open();
    }
}
Example 13: MainWindow
public MainWindow()
{
    InitializeComponent();

    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        _sensor.Open();

        _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

        _colorReader = _sensor.ColorFrameSource.OpenReader();
        _colorReader.FrameArrived += ColorReader_FrameArrived;

        _bodyReader = _sensor.BodyFrameSource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // 2) Initialize the face source with the desired features
        _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace |
                                                      FaceFrameFeatures.FaceEngagement |
                                                      FaceFrameFeatures.Glasses |
                                                      FaceFrameFeatures.Happy |
                                                      FaceFrameFeatures.LeftEyeClosed |
                                                      FaceFrameFeatures.MouthOpen |
                                                      FaceFrameFeatures.PointsInColorSpace |
                                                      FaceFrameFeatures.RightEyeClosed |
                                                      FaceFrameFeatures.LookingAway);

        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;
    }
}
Example 14: KinectReader
public KinectReader(WriteableBitmap depthBitmap, WriteableBitmap colorBitmap)
{
    this.depthBitmap = depthBitmap;
    this.colorBitmap = colorBitmap;

    this.sensor = KinectSensor.GetDefault();
    sensor.Open();

    this.reader = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth | FrameSourceTypes.Color);
    reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
}
Example 15: Core
public Core()
{
    InitExercises();

    sensor = KinectSensor.GetDefault();
    sensor.Open();

    reader = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);

    frameLock = new Mutex();
}