本文整理汇总了C#中Texture.GetBuffer方法的典型用法代码示例。如果您正苦于以下问题:C# Texture.GetBuffer方法的具体用法?C# Texture.GetBuffer怎么用?C# Texture.GetBuffer使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Texture的用法示例。
在下文中一共展示了Texture.GetBuffer方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: CalculateAplha
/// <summary>
/// Rewrites each pixel's alpha channel in place as the average of its RGB
/// components. (NOTE(review): method name keeps the original "Aplha" typo so
/// existing callers keep compiling.)
/// </summary>
/// <param name="texture">Texture to process; null and PF_X8R8G8B8 textures are left untouched.</param>
/// <exception cref="ArgumentException">Thrown when the format is neither X8R8G8B8 nor A8R8G8B8.</exception>
public static unsafe void CalculateAplha(Texture texture)
{
    if (texture == null)
        return;
    if (texture.Format == PixelFormat.PF_X8R8G8B8)
        return;
    if (texture.Format != PixelFormat.PF_A8R8G8B8)
        throw new ArgumentException("Currently only ARGB textures allowed.");

    HardwareBuffer buffer = texture.GetBuffer();
    var pixels = (uint*)buffer.Lock(HardwareBuffer.LockOptions.HBL_DISCARD);
    try
    {
        var stride = sizeof(uint);
        var width = texture.Width;
        var height = texture.Height;
        // Row pitch: width rounded up to the next multiple of sizeof(uint).
        var rowPitch = (width / stride) * stride + ((width % stride) > 0 ? stride : 0);
        for (var row = 0; row < height; ++row)
        {
            var rowBase = rowPitch * row;
            for (var col = 0; col < width; ++col)
            {
                var argb = pixels[rowBase + col];
                var red = (byte)((argb >> 16) & 0xFF);
                var green = (byte)((argb >> 8) & 0xFF);
                var blue = (byte)(argb & 0xFF);
                var alpha = (red + green + blue) / 3;
                pixels[rowBase + col] = (uint)((alpha << 24) + (red << 16) + (green << 8) + blue);
            }
        }
    }
    finally
    {
        // Always release the lock, even if pixel processing throws.
        buffer.Unlock();
    }
}
示例2: InitCameraViewFromTarget
/// <summary>
/// Creates a 1024x1024 render-target texture and attaches a wireframe,
/// perspective-projection camera viewport to it for the remote view.
/// </summary>
private void InitCameraViewFromTarget()
{
    const int textureSize = 1024;

    // Render-target texture the remote camera will draw into.
    cameraTexture = TextureManager.Instance.Create(
        TextureManager.Instance.GetUniqueName("RemoteView"),
        Texture.Type.Type2D,
        new Vec2I(textureSize, textureSize),
        1, 0, PixelFormat.R8G8B8, Texture.Usage.RenderTarget);

    RenderTexture renderTexture = cameraTexture.GetBuffer().GetRenderTarget();

    rmCamera = SceneManager.Instance.CreateCamera("RemoteView");
    rmCamera.ProjectionType = ProjectionTypes.Perspective;
    rmCamera.PolygonMode = PolygonMode.Wireframe;

    renderTexture.AddViewport(rmCamera);
}
示例3: DensityMap
/// <summary>
///
/// </summary>
/// <param name="texture"></param>
/// <param name="channel"></param>
/// <summary>
/// Builds an 8-bit greyscale density map from the given texture, either by a
/// direct greyscale blit or by extracting a single color channel.
/// </summary>
/// <param name="texture">Source texture; must not be null.</param>
/// <param name="channel">Channel to sample; MapChannel.Color means full greyscale conversion.</param>
/// <exception cref="Exception">Thrown for an unrecognized channel value.</exception>
private DensityMap(Texture texture, MapChannel channel)
{
    Debug.Assert(texture != null);
    mFilter = MapFilter.Bilinear;
    //Add self to selfList
    mSelfKey = texture.Name + (int)channel;
    mSelfList.Add(mSelfKey, this);
    mRefCount = 0;
    //Get the texture buffer
    HardwarePixelBuffer buff = texture.GetBuffer();
    //Prepare a PixelBox (8-bit greyscale) to receive the density values
    mPixels = new PixelBox(new BasicBox(0, 0, buff.Width, buff.Height), PixelFormat.BYTE_L);
    byte[] pixelData = new byte[mPixels.ConsecutiveSize];
    mPixelPtr = Memory.PinObject(pixelData);
    mPixels.Data = mPixelPtr;
    if (channel == MapChannel.Color)
    {
        //Copy to the greyscale density map directly if no channel extraction is necessary
        buff.BlitToMemory(mPixels);
    }
    else
    {
        unsafe
        {
            //If channel extraction is necessary, first convert to a PF_R8G8B8A8 format PixelBox
            //This is necessary for the code below to properly extract the desired channel
            PixelBox tmpPixels = new PixelBox(new BasicBox(0, 0, buff.Width, buff.Height), PixelFormat.R8G8B8A8);
            byte[] tmpPix = new byte[tmpPixels.ConsecutiveSize];
            byte* pixPtr = (byte*)Memory.PinObject(tmpPix);
            tmpPixels.Data = (IntPtr)pixPtr;
            buff.BlitToMemory(tmpPixels);
            //Pick out a channel from the pixel buffer
            int channelOffset = 0;
            switch (channel)
            {
                case MapChannel.Red:
                    channelOffset = 3;
                    break;
                case MapChannel.Green:
                    channelOffset = 2;
                    break;
                case MapChannel.Blue:
                    channelOffset = 1;
                    break;
                case MapChannel.Alpha:
                    channelOffset = 0;
                    break;
                default:
                    //should never happen
                    throw new Exception("Invalid channel");
            }
            //And copy that channel into the density map.
            //BUGFIX: read from the temporary RGBA buffer and write into the density
            //map's own buffer. The previous code based BOTH pointers on
            //pixPtr + channelOffset (so mPixels was never filled) and advanced the
            //input pointer by 5 bytes per pixel (*inputPtr++ followed by
            //inputPtr += 4), reading past the end of the temporary buffer.
            byte* inputPtr = pixPtr + channelOffset;
            byte* outputPtr = (byte*)mPixels.Data;
            byte* outputEndPtr = outputPtr + mPixels.ConsecutiveSize;
            while (outputPtr != outputEndPtr)
            {
                *outputPtr++ = *inputPtr;
                inputPtr += 4; //4 bytes per RGBA source pixel; one channel kept
            }
            //Finally, delete the temporary PF_R8G8B8A8 pixel buffer
            Memory.UnpinObject(tmpPix);
            tmpPixels = null;
        }
    }
}
示例4: ColorMap
/// <summary>
///
/// </summary>
/// <param name="texture"></param>
/// <param name="channel"></param>
/// <summary>
/// Builds a 32-bit color map from the given texture, either by a direct blit
/// or by replicating a single extracted channel into R, G and B (full alpha).
/// </summary>
/// <param name="texture">Source texture; must not be null.</param>
/// <param name="channel">Channel to sample; MapChannel.Color means direct copy.</param>
/// <exception cref="Exception">Thrown for an unknown render-system color format or channel value.</exception>
private ColorMap(Texture texture, MapChannel channel)
{
    Debug.Assert(texture != null);
    mFilter = MapFilter.Bilinear;
    //Add self to selfList
    mSelfKey = texture.Name + (int)channel;
    mSelfList.Add(mSelfKey, this);
    mRefCount = 0;
    //Get the texture buffer
    HardwarePixelBuffer buff = texture.GetBuffer();
#warning Root::getSingleton().getRenderSystem()->getColourVertexElementType();
    //Prepare a PixelBox (24-bit RGB) to receive the color values
    VertexElementType format = VertexElementType.Color_ARGB;
    switch (format)
    {
        case VertexElementType.Color_ARGB:
            //DirectX9
            mPixels = new PixelBox(new BasicBox(0, 0, buff.Width, buff.Height), PixelFormat.A8B8G8R8);
            break;
        case VertexElementType.Color_ABGR:
            //OpenGL
            mPixels = new PixelBox(new BasicBox(0, 0, buff.Width, buff.Height), PixelFormat.A8B8G8R8);
            //Patch for Ogre's incorrect blitToMemory() when copying from PF_L8 in OpenGL
            if (buff.Format == PixelFormat.L8)
                channel = MapChannel.Red;
            break;
        default:
            throw new Exception("Unknown RenderSystem color format");
    }
    byte[] pixelData = new byte[mPixels.ConsecutiveSize];
    mPixelPtr = Memory.PinObject(pixelData);
    mPixels.Data = mPixelPtr;
    if (channel == MapChannel.Color)
    {
        //Copy to the color map directly if no channel extraction is necessary
        buff.BlitToMemory(mPixels);
    }
    else
    {
        unsafe
        {
            //If channel extraction is necessary, first convert to a PF_R8G8B8A8 format PixelBox
            //This is necessary for the code below to properly extract the desired channel
            PixelBox tmpPixels = new PixelBox(new BasicBox(0, 0, buff.Width, buff.Height), PixelFormat.R8G8B8A8);
            byte[] tmpPix = new byte[tmpPixels.ConsecutiveSize];
            byte* pixPtr = (byte*)Memory.PinObject(tmpPix);
            tmpPixels.Data = (IntPtr)pixPtr;
            buff.BlitToMemory(tmpPixels);
            //Pick out a channel from the pixel buffer
            int channelOffset = 0;
            switch (channel)
            {
                case MapChannel.Red:
                    channelOffset = 3;
                    break;
                case MapChannel.Green:
                    channelOffset = 2;
                    break;
                case MapChannel.Blue:
                    channelOffset = 1;
                    break;
                case MapChannel.Alpha:
                    channelOffset = 0;
                    break;
                default:
                    //should never happen
                    throw new Exception("Invalid channel");
            }
            //And copy that channel into the color map, replicated into R/G/B with
            //full alpha.
            //BUGFIX: the output pointer must target the color map's own buffer.
            //The previous code based it on pixPtr + channelOffset, so the result
            //was written back into (and channelOffset bytes past the end of) the
            //temporary buffer while mPixels stayed unfilled.
            byte* inputPtr = pixPtr + channelOffset;
            byte* outputPtr = (byte*)mPixels.Data;
            byte* outputEndPtr = outputPtr + mPixels.ConsecutiveSize;
            while (outputPtr != outputEndPtr)
            {
                *outputPtr++ = *inputPtr;
                *outputPtr++ = *inputPtr;
                *outputPtr++ = *inputPtr;
                *outputPtr++ = 0xFF; //Full alpha
                inputPtr += 4; //4 bytes per RGBA source pixel
            }
            //Finally, delete the temporary PF_R8G8B8A8 pixel buffer
            Memory.UnpinObject(tmpPix);
            tmpPixels = null;
            tmpPix = null;
        }
    }
}
示例5: InitCameraViewFromTarget
/// <summary>
/// Creates a 1024x1024 auto-updating render target and attaches a wireframe,
/// perspective-projection camera viewport to it for the remote view.
/// </summary>
private void InitCameraViewFromTarget()
{
    const int textureSize = 1024;

    // Render-target texture the remote camera will draw into.
    cameraTexture = TextureManager.Instance.Create(
        TextureManager.Instance.GetUniqueName("RemoteView"),
        Texture.Type.Type2D,
        new Vec2I(textureSize, textureSize),
        1, 0, PixelFormat.R8G8B8, Texture.Usage.RenderTarget);

    RenderTexture renderTexture = cameraTexture.GetBuffer().GetRenderTarget();
    //you can update render texture manually by means renderTexture.Update() method. For this task set AutoUpdate = false;
    renderTexture.AutoUpdate = true;

    //create camera
    string cameraName = SceneManager.Instance.GetUniqueCameraName("RemoteView");
    rmCamera = SceneManager.Instance.CreateCamera(cameraName);
    rmCamera.ProjectionType = ProjectionTypes.Perspective;
    rmCamera.PolygonMode = PolygonMode.Wireframe;

    renderTexture.AddViewport(rmCamera);
}
示例6: CreateRenderTexture
///////////////
/// <summary>
/// Creates a 512x256 render-to-texture target with a dedicated special-purpose
/// camera, a black-background viewport, and a render-target listener.
/// </summary>
/// <returns>True when the texture was created successfully; otherwise false.</returns>
private bool CreateRenderTexture()
{
    var size = new Vec2I(512, 256);
    string textureName = TextureManager.Instance.GetUniqueName("RenderToTextureExample");
    texture = TextureManager.Instance.Create(
        textureName, Texture.Type.Type2D, size, 1, 0,
        PixelFormat.R8G8B8, Texture.Usage.RenderTarget);
    if (texture == null)
        return false;

    renderTexture = texture.GetBuffer().GetRenderTarget();
    //you can update render texture manually by means renderTexture.Update() method. For this task set AutoUpdate = false;
    renderTexture.AutoUpdate = true;

    //create camera
    string cameraName = SceneManager.Instance.GetUniqueCameraName("RenderToTextureExample");
    camera = SceneManager.Instance.CreateCamera(cameraName);
    camera.Purpose = Camera.Purposes.Special;
    camera.AllowMapCompositorManager = false;

    //add viewport
    viewport = renderTexture.AddViewport(camera);
    viewport.BackgroundColor = new ColorValue(0, 0, 0, 1);
    viewport.ShadowsEnabled = false;
    viewport.MaterialScheme = "";

    //add listener
    renderTargetListener = new SceneRenderTargetListener(this);
    renderTexture.AddListener(renderTargetListener);
    return true;
}