本文整理匯總了C#中SharpDX.Direct3D11.Device.Dispose方法的典型用法代碼示例。如果您正苦於以下問題:C# Device.Dispose方法的具體用法?C# Device.Dispose怎麽用?C# Device.Dispose使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類SharpDX.Direct3D11.Device
的用法示例。
在下文中一共展示了Device.Dispose方法的8個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的C#代碼示例。
示例1: Main
/// <summary>
/// Loads a compiled effect file and prints the index and name of every
/// technique it contains. Usage: dumptech.exe &lt;effect file&gt;
/// </summary>
/// <param name="args">args[0] is the path to the compiled effect file.</param>
public static void Main(string[] args)
{
    // Validate arguments BEFORE allocating any GPU resources, so the
    // early-return paths do not leak a D3D11 device.
    if (args.Length < 1)
    {
        Console.WriteLine("Usage: dumptech.exe <effect file>");
        return;
    }
    string effect_file = args[0];
    if (! File.Exists(effect_file))
    {
        Console.WriteLine("File not found: " + effect_file);
        return;
    }

    Device device = null;
    Effect effect = null;
    try
    {
        device = new Device(DriverType.Hardware, DeviceCreationFlags.None);
        // The bytecode is only needed to construct the Effect; release it promptly.
        using (var shader_bytecode = ShaderBytecode.FromFile(effect_file))
        {
            effect = new Effect(device, shader_bytecode);
        }

        for (int i = 0; i < effect.Description.TechniqueCount; i++)
        {
            var technique = effect.GetTechniqueByIndex(i);
            Console.WriteLine("{0}\t{1}", i, technique.Description.Name);
        }
    }
    catch (SharpDX.CompilationException e)
    {
        Console.WriteLine(e.Message + ": " + effect_file);
    }
    finally
    {
        // Dispose on every exit path, in reverse order of creation.
        if (effect != null)
            effect.Dispose();
        if (device != null)
            device.Dispose();
    }
}
示例2: CaptureScreen
public static Bitmap CaptureScreen()
{
// # of graphics card adapter
const int numAdapter = 0;
// # of output device (i.e. monitor)
const int numOutput = 1;
// Create DXGI Factory1
var factory = new Factory1();
var adapter = factory.GetAdapter1(numAdapter);
// Create device from Adapter
var device = new Device(adapter);
// Get DXGI.Output
var output = adapter.GetOutput(numOutput);
var output1 = output.QueryInterface<Output1>();
// Width/Height of desktop to capture
int width = ((Rectangle)output.Description.DesktopBounds).Width;
//width = 1024;
int height = ((Rectangle)output.Description.DesktopBounds).Height;
//height = 1024;
// Create Staging texture CPU-accessible
var textureDesc = new Texture2DDescription
{
CpuAccessFlags = CpuAccessFlags.Read,
BindFlags = BindFlags.None,
Format = Format.B8G8R8A8_UNorm,
Width = width,
Height = height,
OptionFlags = ResourceOptionFlags.None,
MipLevels = 1,
ArraySize = 1,
SampleDescription = { Count = 1, Quality = 0 },
Usage = ResourceUsage.Staging
};
var screenTexture = new Texture2D(device, textureDesc);
// Duplicate the output
var duplicatedOutput = output1.DuplicateOutput(device);
bool captureDone = false;
Bitmap bitmap = null;
for (int i = 0; !captureDone; i++)
{
try
{
SharpDX.DXGI.Resource screenResource;
OutputDuplicateFrameInformation duplicateFrameInformation;
// Try to get duplicated frame within given time
duplicatedOutput.AcquireNextFrame(10000, out duplicateFrameInformation, out screenResource);
if (i > 0)
{
// copy resource into memory that can be accessed by the CPU
using (var screenTexture2D = screenResource.QueryInterface<Texture2D>())
device.ImmediateContext.CopyResource(screenTexture2D, screenTexture);
// Get the desktop capture texture
var mapSource = device.ImmediateContext.MapSubresource(screenTexture, 0, MapMode.Read, MapFlags.None);
// Create Drawing.Bitmap
bitmap = new System.Drawing.Bitmap(width, height, PixelFormat.Format32bppArgb);
var boundsRect = new System.Drawing.Rectangle(0, 0, width, height);
// Copy pixels from screen capture Texture to GDI bitmap
var mapDest = bitmap.LockBits(boundsRect, ImageLockMode.WriteOnly, bitmap.PixelFormat);
var sourcePtr = mapSource.DataPointer;
var destPtr = mapDest.Scan0;
for (int y = 0; y < height; y++)
{
// Copy a single line
Utilities.CopyMemory(destPtr, sourcePtr, width * 4);
// Advance pointers
sourcePtr = IntPtr.Add(sourcePtr, mapSource.RowPitch);
destPtr = IntPtr.Add(destPtr, mapDest.Stride);
}
// Release source and dest locks
bitmap.UnlockBits(mapDest);
device.ImmediateContext.UnmapSubresource(screenTexture, 0);
// Capture done
captureDone = true;
}
screenResource.Dispose();
duplicatedOutput.ReleaseFrame();
}
catch (SharpDXException e)
{
if (e.ResultCode.Code != SharpDX.DXGI.ResultCode.WaitTimeout.Result.Code)
{
//.........這裏部分代碼省略.........
示例3: IsProfileSupported
/// <summary>
/// Tests to see if the adapter supports the requested profile.
/// </summary>
/// <param name="featureLevel">The graphics profile.</param>
/// <returns>true if the profile is supported</returns>
public bool IsProfileSupported(FeatureLevel featureLevel)
{
    // The only reliable check is to actually create a D3D11 device for the
    // requested level and compare the level the runtime selected.
    Direct3D11.Device device = null;
    try
    {
        device = new SharpDX.Direct3D11.Device(adapter, DeviceCreationFlags.None, featureLevel);
        return device.FeatureLevel == featureLevel;
    }
    catch (SharpDXException)
    {
        // Device creation failed outright: the profile is not supported.
        return false;
    }
    finally
    {
        // Release the probe device on every exit path.
        if (device != null)
            device.Dispose();
    }
}
示例4: CreateAdaptersList
//.........這裏部分代碼省略.........
if(supportedDevice)
{
bool outputsAttached = adapter.Outputs.Length > 0;
if (outputsAttached)
{
for (int j = 0; j < adapter.Outputs.Length; j++)
{
var output = adapter.Outputs[j];
info.Name = String.Format("{0} + {1}", adapter.Description.Description, output.Description.DeviceName);
info.OutputName = output.Description.DeviceName;
info.OutputId = j;
var displayModeList = output.GetDisplayModeList(MyRender11Constants.BACKBUFFER_FORMAT, DisplayModeEnumerationFlags.Interlaced);
var adapterDisplayModes = new MyDisplayMode[displayModeList.Length];
for (int k = 0; k < displayModeList.Length; k++)
{
var displayMode = displayModeList[k];
adapterDisplayModes[k] = new MyDisplayMode
{
Height = displayMode.Height,
Width = displayMode.Width,
RefreshRate = displayMode.RefreshRate.Numerator,
RefreshRateDenominator = displayMode.RefreshRate.Denominator
};
}
Array.Sort(adapterDisplayModes, m_refreshRatePriorityComparer);
info.SupportedDisplayModes = adapterDisplayModes;
info.CurrentDisplayMode = adapterDisplayModes[adapterDisplayModes.Length - 1];
LogOutputDisplayModes(ref info);
m_adapterModes[adapterIndex] = displayModeList;
// add one entry per every adapter-output pair
adaptersList.Add(info);
adapterIndex++;
}
}
else
{
// FALLBACK MODES
MyDisplayMode[] fallbackDisplayModes = new MyDisplayMode[] {
new MyDisplayMode(640, 480, 60000, 1000),
new MyDisplayMode(720, 576, 60000, 1000),
new MyDisplayMode(800, 600, 60000, 1000),
new MyDisplayMode(1024, 768, 60000, 1000),
new MyDisplayMode(1152, 864, 60000, 1000),
new MyDisplayMode(1280, 720, 60000, 1000),
new MyDisplayMode(1280, 768, 60000, 1000),
new MyDisplayMode(1280, 800, 60000, 1000),
new MyDisplayMode(1280, 960, 60000, 1000),
new MyDisplayMode(1280, 1024, 60000, 1000),
new MyDisplayMode(1360, 768, 60000, 1000),
new MyDisplayMode(1360, 1024, 60000, 1000),
new MyDisplayMode(1440, 900, 60000, 1000),
new MyDisplayMode(1600, 900, 60000, 1000),
new MyDisplayMode(1600, 1024, 60000, 1000),
new MyDisplayMode(1600, 1200, 60000, 1000),
new MyDisplayMode(1680, 1200, 60000, 1000),
new MyDisplayMode(1680, 1050, 60000, 1000),
new MyDisplayMode(1920, 1080, 60000, 1000),
new MyDisplayMode(1920, 1200, 60000, 1000),
};
info.OutputName = "FallbackOutput";
info.Name = String.Format("{0}", adapter.Description.Description);
info.OutputId = 0;
info.CurrentDisplayMode = fallbackDisplayModes[fallbackDisplayModes.Length - 1];
info.SupportedDisplayModes = fallbackDisplayModes;
info.FallbackDisplayModes = true;
// add one entry for adapter-fallback output pair
adaptersList.Add(info);
adapterIndex++;
}
}
else
{
info.SupportedDisplayModes = new MyDisplayMode[0];
}
MyRender11.Log.WriteLine("Fallback display modes = " + info.FallbackDisplayModes);
LogAdapterInfoEnd();
if(adapterTestDevice != null)
{
adapterTestDevice.Dispose();
adapterTestDevice = null;
}
}
return adaptersList.ToArray();
}
示例5: CreateAdaptersList
//.........這裏部分代碼省略.........
DeviceName = adapter.Description.Description,
Description = deviceDesc,
IsSupported = supportedDevice,
AdapterDeviceId = i,
Has512MBRam = vram > 500000000,
HDRSupported = true,
MaxTextureSize = SharpDX.Direct3D11.Texture2D.MaximumTexture2DSize,
VRAM = vram,
MultithreadedRenderingSupported = supportsCommandLists
};
if(vram >= 2000000000)
{
info.MaxTextureQualitySupported = MyTextureQuality.HIGH;
}
else if (vram >= 1000000000)
{
info.MaxTextureQualitySupported = MyTextureQuality.MEDIUM;
}
else
{
info.MaxTextureQualitySupported = MyTextureQuality.LOW;
}
info.MaxAntialiasingModeSupported = MyAntialiasingMode.FXAA;
if (supportedDevice)
{
if (adapterTestDevice.CheckMultisampleQualityLevels(Format.R11G11B10_Float, 2) > 0)
{
info.MaxAntialiasingModeSupported = MyAntialiasingMode.MSAA_2;
}
if (adapterTestDevice.CheckMultisampleQualityLevels(Format.R11G11B10_Float, 4) > 0)
{
info.MaxAntialiasingModeSupported = MyAntialiasingMode.MSAA_4;
}
if (adapterTestDevice.CheckMultisampleQualityLevels(Format.R11G11B10_Float, 8) > 0)
{
info.MaxAntialiasingModeSupported = MyAntialiasingMode.MSAA_8;
}
}
LogAdapterInfoBegin(ref info);
if(supportedDevice)
{
for(int j=0; j<factory.Adapters[i].Outputs.Length; j++)
{
var output = factory.Adapters[i].Outputs[j];
info.Name = String.Format("{0} + {1}", adapter.Description.Description, output.Description.DeviceName);
info.OutputName = output.Description.DeviceName;
info.OutputId = j;
var displayModeList = factory.Adapters[i].Outputs[j].GetDisplayModeList(MyRender11Constants.BACKBUFFER_FORMAT, DisplayModeEnumerationFlags.Interlaced);
var adapterDisplayModes = new MyDisplayMode[displayModeList.Length];
for (int k = 0; k < displayModeList.Length; k++)
{
var displayMode = displayModeList[k];
adapterDisplayModes[k] = new MyDisplayMode
{
Height = displayMode.Height,
Width = displayMode.Width,
RefreshRate = displayMode.RefreshRate.Numerator,
RefreshRateDenominator = displayMode.RefreshRate.Denominator
};
}
Array.Sort(adapterDisplayModes, m_refreshRatePriorityComparer);
info.SupportedDisplayModes = adapterDisplayModes;
info.CurrentDisplayMode = adapterDisplayModes[adapterDisplayModes.Length - 1];
adaptersList.Add(info);
m_adapterModes[adapterIndex] = displayModeList;
adapterIndex++;
LogOutputDisplayModes(ref info);
}
}
else
{
info.SupportedDisplayModes = new MyDisplayMode[0];
adaptersList.Add(info);
adapterIndex++;
}
LogAdapterInfoEnd();
if(adapterTestDevice != null)
{
adapterTestDevice.Dispose();
adapterTestDevice = null;
}
}
return adaptersList.ToArray();
}
示例6: Main
//.........這裏部分代碼省略.........
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// EFFECT SETUP ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Effect 1 : BitmapSource - take decoded image data and get a BitmapSource from it
var bitmapSourceEffect = new d2.Effects.BitmapSource(d2dContext);
bitmapSourceEffect.WicBitmapSource = formatConverter;
// Effect 2 : GaussianBlur - give the bitmapsource a gaussian blurred effect
var gaussianBlurEffect = new d2.Effects.GaussianBlur(d2dContext);
gaussianBlurEffect.SetInput(0, bitmapSourceEffect.Output, true);
gaussianBlurEffect.StandardDeviation = 5f;
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// OVERLAY TEXT SETUP ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
var textFormat = new dw.TextFormat(dwFactory, "Arial", 15f); // create the text format of specified font configuration
// draw a long text to show the automatic line wrapping
var textToDraw = "Some long text to show the drawing of preformatted "
+ "glyphs using DirectWrite on the Direct2D surface."
+ " Notice the automatic wrapping of line if it exceeds desired width.";
// create the text layout - this improves the drawing performance for static text
// as the glyph positions are precalculated
var textLayout = new dw.TextLayout(dwFactory, textToDraw, textFormat, 300f, 1000f);
var textBrush = new d2.SolidColorBrush(d2dContext, Color.LightGreen);
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// RENDER TARGET SETUP ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// create the d2d bitmap description using default flags (from SharpDX samples) and 96 DPI
var d2dBitmapProps = new d2.BitmapProperties1(d2PixelFormat, 96, 96, d2.BitmapOptions.Target | d2.BitmapOptions.CannotDraw);
// the render target
var d2dRenderTarget = new d2.Bitmap1(d2dContext, new Size2(pixelWidth, pixelHeight), d2dBitmapProps);
d2dContext.Target = d2dRenderTarget; // associate bitmap with the d2d context
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// DRAWING ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// slow preparations - fast drawing:
d2dContext.BeginDraw();
d2dContext.DrawImage(gaussianBlurEffect);
d2dContext.DrawTextLayout(new Vector2(5f, 5f), textLayout, textBrush);
d2dContext.EndDraw();
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// IMAGE SAVING ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// delete the output file if it already exists
if (System.IO.File.Exists(outputPath)) System.IO.File.Delete(outputPath);
// use the appropiate overload to write either to stream or to a file
var stream = new wic.WICStream(imagingFactory, outputPath, NativeFileAccess.Write);
// select the image encoding format HERE
var encoder = new wic.PngBitmapEncoder(imagingFactory);
encoder.Initialize(stream);
var bitmapFrameEncode = new wic.BitmapFrameEncode(encoder);
bitmapFrameEncode.Initialize();
bitmapFrameEncode.SetSize(pixelWidth, pixelHeight);
bitmapFrameEncode.SetPixelFormat(ref wicPixelFormat);
// this is the trick to write D2D1 bitmap to WIC
var imageEncoder = new wic.ImageEncoder(imagingFactory, d2dDevice);
imageEncoder.WriteFrame(d2dRenderTarget, bitmapFrameEncode, new wic.ImageParameters(d2PixelFormat, 96, 96, 0, 0, pixelWidth, pixelHeight));
bitmapFrameEncode.Commit();
encoder.Commit();
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// CLEANUP ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// dispose everything and free used resources
bitmapFrameEncode.Dispose();
encoder.Dispose();
stream.Dispose();
textBrush.Dispose();
textLayout.Dispose();
textFormat.Dispose();
formatConverter.Dispose();
gaussianBlurEffect.Dispose();
bitmapSourceEffect.Dispose();
d2dRenderTarget.Dispose();
inputStream.Dispose();
decoder.Dispose();
d2dContext.Dispose();
dwFactory.Dispose();
imagingFactory.Dispose();
d2dDevice.Dispose();
dxgiDevice.Dispose();
d3dDevice.Dispose();
defaultDevice.Dispose();
// show the result
System.Diagnostics.Process.Start(outputPath);
}
示例7: Resize
public static MemoryStream Resize(System.IO.Stream source, int maxwidth, int maxheight, Action beforeDrawImage, Action afterDrawImage)
{
// initialize the D3D device which will allow to render to image any graphics - 3D or 2D
var defaultDevice = new SharpDX.Direct3D11.Device(SharpDX.Direct3D.DriverType.Warp,
d3d.DeviceCreationFlags.BgraSupport | d3d.DeviceCreationFlags.SingleThreaded | d3d.DeviceCreationFlags.PreventThreadingOptimizations);
var d3dDevice = defaultDevice.QueryInterface<d3d.Device1>(); // get a reference to the Direct3D 11.1 device
var dxgiDevice = d3dDevice.QueryInterface<dxgi.Device>(); // get a reference to DXGI device
var d2dDevice = new d2.Device(dxgiDevice); // initialize the D2D device
var imagingFactory = new wic.ImagingFactory2(); // initialize the WIC factory
// initialize the DeviceContext - it will be the D2D render target and will allow all rendering operations
var d2dContext = new d2.DeviceContext(d2dDevice, d2.DeviceContextOptions.None);
var dwFactory = new dw.Factory();
// specify a pixel format that is supported by both D2D and WIC
var d2PixelFormat = new d2.PixelFormat(dxgi.Format.R8G8B8A8_UNorm, d2.AlphaMode.Premultiplied);
// if in D2D was specified an R-G-B-A format - use the same for wic
var wicPixelFormat = wic.PixelFormat.Format32bppPRGBA;
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// IMAGE LOADING ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
var decoder = new wic.BitmapDecoder(imagingFactory,source, wic.DecodeOptions.CacheOnLoad);
// decode the loaded image to a format that can be consumed by D2D
var formatConverter = new wic.FormatConverter(imagingFactory);
formatConverter.Initialize(decoder.GetFrame(0), wicPixelFormat);
// store the image size - output will be of the same size
var inputImageSize = formatConverter.Size;
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// RENDER TARGET SETUP ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// create the d2d bitmap description using default flags (from SharpDX samples) and 96 DPI
var d2dBitmapProps = new d2.BitmapProperties1(d2PixelFormat, 96, 96, d2.BitmapOptions.Target | d2.BitmapOptions.CannotDraw);
//Calculate size
var resultSize = MathUtil.ScaleWithin(inputImageSize.Width,inputImageSize.Height,maxwidth,maxheight);
var newWidth = resultSize.Item1;
var newHeight = resultSize.Item2;
// the render target
var d2dRenderTarget = new d2.Bitmap1(d2dContext, new Size2(newWidth, newHeight), d2dBitmapProps);
d2dContext.Target = d2dRenderTarget; // associate bitmap with the d2d context
var bitmapSourceEffect = new d2.Effects.BitmapSourceEffect(d2dContext);
bitmapSourceEffect.WicBitmapSource = formatConverter;
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// DRAWING ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
beforeDrawImage();
// slow preparations - fast drawing:
d2dContext.BeginDraw();
d2dContext.Transform = Matrix3x2.Scaling(new Vector2((float)(newWidth / (float)inputImageSize.Width), (float)(newHeight / (float)inputImageSize.Height)));
d2dContext.DrawImage(bitmapSourceEffect, d2.InterpolationMode.HighQualityCubic);
d2dContext.EndDraw();
afterDrawImage();
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// IMAGE SAVING ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
var ms = new MemoryStream();
// use the appropiate overload to write either to stream or to a file
var stream = new wic.WICStream(imagingFactory,ms);
// select the image encoding format HERE
var encoder = new wic.JpegBitmapEncoder(imagingFactory);
encoder.Initialize(stream);
var bitmapFrameEncode = new wic.BitmapFrameEncode(encoder);
bitmapFrameEncode.Initialize();
bitmapFrameEncode.SetSize(newWidth, newHeight);
bitmapFrameEncode.SetPixelFormat(ref wicPixelFormat);
// this is the trick to write D2D1 bitmap to WIC
var imageEncoder = new wic.ImageEncoder(imagingFactory, d2dDevice);
imageEncoder.WriteFrame(d2dRenderTarget, bitmapFrameEncode, new wic.ImageParameters(d2PixelFormat, 96, 96, 0, 0, newWidth, newHeight));
bitmapFrameEncode.Commit();
encoder.Commit();
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// CLEANUP ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// dispose everything and free used resources
bitmapFrameEncode.Dispose();
encoder.Dispose();
stream.Dispose();
formatConverter.Dispose();
bitmapSourceEffect.Dispose();
d2dRenderTarget.Dispose();
decoder.Dispose();
d2dContext.Dispose();
dwFactory.Dispose();
//.........這裏部分代碼省略.........
示例8: Main
/// <summary>
/// Plays a user-selected media file with Media Foundation's MediaEngine,
/// rendering video frames into a DXGI swap chain on a render form.
/// </summary>
static void Main(string[] args)
{
    // Select a File to play
    var openFileDialog = new OpenFileDialog { Title = "Select a file", Filter = "Media Files(*.WMV;*.MP4;*.AVI)|*.WMV;*.MP4;*.AVI" };
    var result = openFileDialog.ShowDialog();
    if (result == DialogResult.Cancel)
    {
        return;
    }
    // Initialize MediaFoundation before creating any MediaEngine objects.
    MediaManager.Startup();
    var renderForm = new SharpDX.Windows.RenderForm();
    device = CreateDeviceForVideo(out dxgiManager);
    // Creates the MediaEngineClassFactory
    var mediaEngineFactory = new MediaEngineClassFactory();
    // Assign our dxgi manager, and set format to bgra
    MediaEngineAttributes attr = new MediaEngineAttributes();
    attr.VideoOutputFormat = (int)SharpDX.DXGI.Format.B8G8R8A8_UNorm;
    attr.DxgiManager = dxgiManager;
    // Creates MediaEngine for AudioOnly
    var mediaEngine = new MediaEngine(mediaEngineFactory, attr, MediaEngineCreateFlags.None);
    // Register our PlayBackEvent
    mediaEngine.PlaybackEvent += OnPlaybackCallback;
    // Query for MediaEngineEx interface
    mediaEngineEx = mediaEngine.QueryInterface<MediaEngineEx>();
    // Opens the file
    var fileStream = openFileDialog.OpenFile();
    // Create a ByteStream object from it
    var stream = new ByteStream(fileStream);
    // Creates an URL to the file
    var url = new Uri(openFileDialog.FileName, UriKind.RelativeOrAbsolute);
    // Set the source stream
    mediaEngineEx.SetSourceFromByteStream(stream, url.AbsoluteUri);
    // Wait for MediaEngine to be ready
    if (!eventReadyToPlay.WaitOne(1000))
    {
        Console.WriteLine("Unexpected error: Unable to play this file");
    }
    // Create our swapchain
    swapChain = CreateSwapChain(device, renderForm.Handle);
    // Get DXGI surface to be used by our media engine
    var texture = Texture2D.FromSwapChain<Texture2D>(swapChain, 0);
    var surface = texture.QueryInterface<SharpDX.DXGI.Surface>();
    // Get our video size
    int w, h;
    mediaEngine.GetNativeVideoSize(out w, out h);
    // Play the music
    mediaEngineEx.Play();
    long ts;
    RenderLoop.Run(renderForm, () =>
    {
        // Transfer frame if a new one is available
        if (mediaEngine.OnVideoStreamTick(out ts))
        {
            mediaEngine.TransferVideoFrame(surface, null, new SharpDX.Rectangle(0, 0, w, h), null);
        }
        swapChain.Present(1, SharpDX.DXGI.PresentFlags.None);
    });
    // Shut the engine down first, then release everything it may still reference.
    // The original leaked the surface, texture, byte stream, engine interfaces
    // and the class factory, and never balanced MediaManager.Startup().
    mediaEngine.Shutdown();
    surface.Dispose();
    texture.Dispose();
    mediaEngineEx.Dispose();
    mediaEngine.Dispose();
    mediaEngineFactory.Dispose();
    stream.Dispose();
    fileStream.Dispose();
    swapChain.Dispose();
    device.Dispose();
    // Balance the MediaManager.Startup() call made above.
    MediaManager.Shutdown();
}