This article collects typical usage examples of the C# RenderTexture.AddViewport method. If you are wondering what RenderTexture.AddViewport does in C#, how to call it, or how it is used in practice, the hand-picked code examples below may help. You can also explore further usage of its containing class, RenderTexture.

The following shows 4 code examples of the RenderTexture.AddViewport method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code examples.
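Before the full examples, here is a minimal sketch of the overall pattern they share: create a render-target texture, obtain its RenderTexture, and attach a camera with AddViewport. It assumes a NeoAxis-style API as used in Examples 1 and 4; the identifiers and parameter values are illustrative only.

// Minimal render-to-texture setup (sketch; names and sizes are illustrative).
Vec2I size = new Vec2I(512, 512);
string name = TextureManager.Instance.GetUniqueName("MyRenderTexture");
Texture texture = TextureManager.Instance.Create(name, Texture.Type.Type2D, size, 1, 0,
    PixelFormat.R8G8B8, Texture.Usage.RenderTarget);
RenderTexture renderTexture = texture.GetBuffer().GetRenderTarget();

// Create a camera that will render into the texture and attach it via AddViewport.
Camera camera = SceneManager.Instance.CreateCamera(
    SceneManager.Instance.GetUniqueCameraName("MyRenderTextureCamera"));
Viewport viewport = renderTexture.AddViewport(camera);
viewport.BackgroundColor = new ColorValue(0, 0, 0, 1);
viewport.ShadowsEnabled = false;

// Let the engine update the target every frame, or set false and call renderTexture.Update() manually.
renderTexture.AutoUpdate = true;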
Example 1: CreateRenderTarget
bool CreateRenderTarget()
{
    DestroyRenderTarget();

    if( RendererWorld.Instance == null )
        return false;

    Vec2I textureSize = GetDemandTextureSize();
    if( textureSize.X < 1 || textureSize.Y < 1 )
        return false;

    string textureName = TextureManager.Instance.GetUniqueName( "WPFRenderTexture" );

    int hardwareFSAA = 0;
    if( !RendererWorld.InitializationOptions.AllowSceneMRTRendering )
    {
        if( !int.TryParse( RendererWorld.InitializationOptions.FullSceneAntialiasing, out hardwareFSAA ) )
            hardwareFSAA = 0;
    }

    texture = TextureManager.Instance.Create( textureName, Texture.Type.Type2D, textureSize,
        1, 0, Engine.Renderer.PixelFormat.R8G8B8, Texture.Usage.RenderTarget, false, hardwareFSAA );
    if( texture == null )
        return false;
    currentTextureSize = textureSize;

    renderTexture = texture.GetBuffer().GetRenderTarget();
    renderTexture.AutoUpdate = false;
    renderTexture.AllowAdditionalMRTs = true;

    camera = SceneManager.Instance.CreateCamera(
        SceneManager.Instance.GetUniqueCameraName( "UserControl" ) );
    camera.Purpose = Camera.Purposes.MainCamera;

    //update camera settings
    camera.NearClipDistance = cameraNearFarClipDistance.Minimum;
    camera.FarClipDistance = cameraNearFarClipDistance.Maximum;
    camera.AspectRatio = (float)texture.Size.X / (float)texture.Size.Y;
    camera.FixedUp = cameraFixedUp;
    camera.Position = cameraPosition;
    camera.Direction = cameraDirection;
    camera.Fov = cameraFov;
    camera.ProjectionType = cameraProjectionType;
    camera.OrthoWindowHeight = cameraOrthoWindowHeight;

    viewport = renderTexture.AddViewport( camera );

    //Initialize HDR compositor for HDR render technique
    if( EngineApp.RenderTechnique == "HDR" )
    {
        viewport.AddCompositor( "HDR", 0 );
        viewport.SetCompositorEnabled( "HDR", true );
    }

    //Initialize Fast Approximate Antialiasing (FXAA)
    {
        bool useMRT = RendererWorld.InitializationOptions.AllowSceneMRTRendering;
        string fsaa = RendererWorld.InitializationOptions.FullSceneAntialiasing;
        if( ( useMRT && ( fsaa == "" || fsaa == "RecommendedSetting" ) && IsActivateFXAAByDefault() ) ||
            fsaa == "FXAA" )
        {
            if( RenderSystem.Instance.HasShaderModel3() )
                InitializeFXAACompositor();
        }
    }

    //add listener
    renderTargetListener = new ViewRenderTargetListener( this );
    renderTexture.AddListener( renderTargetListener );

    if( guiRenderer == null )
        guiRenderer = new GuiRenderer( viewport );
    else
        guiRenderer.ChangeViewport( viewport );

    if( controlManager == null )
        controlManager = new ScreenControlManager( guiRenderer );

    //initialize D3DImage output
    if( d3dImageIsSupported && allowUsingD3DImage )
    {
        // create a D3DImage to host the scene and monitor it for changes in front buffer availability
        if( d3dImage == null )
        {
            d3dImage = new D3DImage();
            d3dImage.IsFrontBufferAvailableChanged += D3DImage_IsFrontBufferAvailableChanged;
            CompositionTarget.Rendering += D3DImage_OnRendering;
        }

        // set output to background image
        Background = new ImageBrush( d3dImage );

        // set the back buffer using the new scene pointer
        HardwarePixelBuffer buffer = texture.GetBuffer( 0, 0 );
        GetD3D9HardwarePixelBufferData data = new GetD3D9HardwarePixelBufferData();
        data.hardwareBuffer = buffer._GetRealObject();
        data.outPointer = IntPtr.Zero;
        unsafe
//......... the rest of this example is omitted here .........
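The remainder of Example 1 is omitted in the source. For orientation only, the D3DImage hookup typically continues along these lines; this is a hedged sketch using the standard WPF D3DImage API, not the author's code, and it assumes the engine has already written the texture's IDirect3DSurface9 pointer into data.outPointer:

// Hedged sketch (not part of the original example): bind the Direct3D surface to the D3DImage.
if( data.outPointer != IntPtr.Zero )
{
    d3dImage.Lock();
    d3dImage.SetBackBuffer( D3DResourceType.IDirect3DSurface9, data.outPointer );
    d3dImage.Unlock();
}

// The CompositionTarget.Rendering handler registered above would then invalidate the image each frame:
void D3DImage_OnRendering( object sender, EventArgs e )
{
    if( d3dImage.IsFrontBufferAvailable )
    {
        d3dImage.Lock();
        d3dImage.AddDirtyRect( new Int32Rect( 0, 0, d3dImage.PixelWidth, d3dImage.PixelHeight ) );
        d3dImage.Unlock();
    }
}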
Example 2: CreateReflectionTexture
void CreateReflectionTexture()
{
    DestroyReflectionTexture();

    Vec2I textureSize = GetRequiredReflectionTextureSize();

    //create render texture
    string textureName = TextureManager.Instance.GetUniqueName( "WaterPlaneReflection" );
    bool hdr = RendererWorld.Instance.DefaultViewport.GetCompositorInstance( "HDR" ) != null;
    reflectionTexture = TextureManager.Instance.Create( textureName, Texture.Type.Type2D,
        textureSize, 1, 0, hdr ? PixelFormat.Float16RGB : PixelFormat.R8G8B8,
        Texture.Usage.RenderTarget );
    reflectionRenderTexture = reflectionTexture.GetBuffer().GetRenderTarget();

    //create camera
    reflectionCamera = SceneManager.Instance.CreateCamera();
    reflectionCamera.AllowFrustumTestMode = true;

    //add viewport
    reflectionViewport = reflectionRenderTexture.AddViewport( reflectionCamera );
    reflectionViewport.ShadowsEnabled = false;
    reflectionViewport.MaterialScheme = MaterialSchemes.Low.ToString();

    //add listener
    renderTargetListener = new ReflectionRenderTargetListener( this );
    reflectionRenderTexture.AddListener( renderTargetListener );

    reflectionRenderTexture.AutoUpdate = Visible;
}
Example 3: Setup
protected override bool Setup()
{
    ReadConfigs();
    CreateTasks();

    mMapExporter = new GorgonMapExporter() { atlasHeight = 4096, atlasWidth = 4096 };
    mImageExporter = new PngImageExporter();

    var setup = base.Setup();

    var ptr = TextureManager.Singleton.CreateManual("RttTex",
        ResourceGroupManager.DEFAULT_RESOURCE_GROUP_NAME,
        TextureType.TEX_TYPE_2D,
        148,
        148,
        0,
        PixelFormat.PF_R8G8B8A8,
        (int)TextureUsage.TU_RENDERTARGET
        );
    mRTT = ptr.GetBuffer().GetRenderTarget();

    mRTVP = mRTT.AddViewport(mCamera);
    mRTVP.BackgroundColour = new ColourValue(0, 0, 0, 0);
    mRTVP.SetClearEveryFrame(true);
    mRTVP.OverlaysEnabled = false;

    //Calculate diagonal distance value
    //mPythDistance = (mDistance / Mogre.Math.Sqrt(2));

    var altitude = new Degree(mCameraAngle);
    var angles = new float[]
    {
        180f,   // South
        135f,   // Southeast
        90f,    // East
        45f,    // Northeast
        0f,     // North
        -45f,   // Northwest
        -90f,   // West
        -135f,  // Southwest
    };
    mCameraDirections = new List<string> {
        "s",
        "se",
        "e",
        "ne",
        "n",
        "nw",
        "w",
        "sw"
    };

    mCameraPositions = new List<Vector3>();
    for (var i = 0; i < 8; i++)
    {
        float azimuth = angles[i];
        string dirname = mCameraDirections[i];
        Vector3 pos = getPosOnSphere(mDistance, new Degree(-azimuth), -altitude);
        mCameraPositions.Add(pos);
        Console.WriteLine("Determined camera pos: {0,2} is {1,5:F2},{2,5:F2},{3,5:F2}", dirname, pos.x, pos.y, pos.z);
    }

    /*
    mCameraPositions = new List<Vector3> {
        new Vector3(0, mDistance, mDistance),                   // Front / South
        new Vector3(-mPythDistance, mDistance, mPythDistance),  // Front-right / southwest
        new Vector3(-mDistance, mDistance, 0),                  // Right / west
        new Vector3(-mPythDistance, mDistance, -mPythDistance), // Back-right / northwest
        new Vector3(0, mDistance, -mDistance),                  // Back / north
        new Vector3(mPythDistance, mDistance, -mPythDistance),  // Back-left / northeast
        new Vector3(mDistance, mDistance, 0),                   // Left / east
        new Vector3(mPythDistance, mDistance, mPythDistance),   // Front-left / southeast
    };
    */

    //CompositorManager.Singleton.AddCompositor(vp, "EdgeDetectCompositor", 0);
    //CompositorManager.Singleton.AddCompositor(rtvp, "EdgeDetectCompositor", 0);
    //CompositorManager.Singleton.SetCompositorEnabled(vp, "EdgeDetectCompositor", true);
    //CompositorManager.Singleton.SetCompositorEnabled(rtvp, "EdgeDetectCompositor", true);

    /*CompositorManager.Singleton.AddCompositor(vp, "Pixelate", 0);
    CompositorManager.Singleton.AddCompositor(mRTVP, "Pixelate", 0);
    CompositorManager.Singleton.SetCompositorEnabled(vp, "Pixelate", true);
    CompositorManager.Singleton.SetCompositorEnabled(mRTVP, "Pixelate", true);*/

    //CompositorManager.Singleton.AddCompositor(vp, "Normal", 0);
    //CompositorManager.Singleton.AddCompositor(rtvp, "Normal", 0);
    //CompositorManager.Singleton.SetCompositorEnabled(vp, "Normal", true);
    //CompositorManager.Singleton.SetCompositorEnabled(rtvp, "Normal", true);

    //CompositorManager.Singleton.AddCompositor(vp, "SMAA", 0);
    //CompositorManager.Singleton.AddCompositor(rtvp, "SMAA", 0);
    //CompositorManager.Singleton.SetCompositorEnabled(vp, "SMAA", true);
    //CompositorManager.Singleton.SetCompositorEnabled(rtvp, "SMAA", true);

    //CompositorManager.Singleton.AddCompositor(vp, "FXAA", 0);
    //CompositorManager.Singleton.AddCompositor(rtvp, "FXAA", 0);
    //CompositorManager.Singleton.SetCompositorEnabled(vp, "FXAA", true);
    //CompositorManager.Singleton.SetCompositorEnabled(rtvp, "FXAA", true);

    //Set up task scheduler
    imageTrimScheduler = new LimitedConcurrencyLevelTaskScheduler(3);
    imageTrimTaskFactory = new TaskFactory(imageTrimScheduler);
    imagePackScheduler = new LimitedConcurrencyLevelTaskScheduler(1);
    imagePackTaskFactory = new TaskFactory(imagePackScheduler);
//......... the rest of this example is omitted here .........
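The rest of Example 3 is also omitted in the source. Judging from the setup above, the capture step presumably renders the render target once per camera direction; the loop below is a hypothetical sketch against Mogre's standard Camera/RenderTarget API (the file names and loop structure are assumptions, not the author's code):

// Hypothetical capture loop: render the 148x148 RTT once for each of the 8 directions.
for (var i = 0; i < mCameraPositions.Count; i++)
{
    mCamera.Position = mCameraPositions[i];  // place the camera on the sphere computed above
    mCamera.LookAt(Vector3.ZERO);            // aim at the model at the origin
    mRTT.Update();                           // render one frame into the render target
    mRTT.WriteContentsToFile("render_" + mCameraDirections[i] + ".png");
}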
Example 4: CreateRenderTexture
///////////////
private bool CreateRenderTexture()
{
    Vec2I size = new Vec2I(512, 256);

    string textureName = TextureManager.Instance.GetUniqueName("RenderToTextureExample");
    texture = TextureManager.Instance.Create(textureName, Texture.Type.Type2D, size, 1, 0,
        PixelFormat.R8G8B8, Texture.Usage.RenderTarget);
    if (texture == null)
        return false;

    renderTexture = texture.GetBuffer().GetRenderTarget();
    //the render texture can also be updated manually via the renderTexture.Update() method; to do that, set AutoUpdate = false
    renderTexture.AutoUpdate = true;

    //create camera
    string cameraName = SceneManager.Instance.GetUniqueCameraName("RenderToTextureExample");
    camera = SceneManager.Instance.CreateCamera(cameraName);
    camera.Purpose = Camera.Purposes.Special;
    camera.AllowMapCompositorManager = false;

    //add viewport
    viewport = renderTexture.AddViewport(camera);
    viewport.BackgroundColor = new ColorValue(0, 0, 0, 1);
    viewport.ShadowsEnabled = false;
    viewport.MaterialScheme = "";

    //add listener
    renderTargetListener = new SceneRenderTargetListener(this);
    renderTexture.AddListener(renderTargetListener);

    return true;
}
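As the comment in Example 4 notes, the render target does not have to update automatically. A minimal manual-update variant, using the same fields as the example (purely illustrative):

// Manual update variant: disable automatic updates and render on demand.
renderTexture.AutoUpdate = false;
// ...later, whenever a fresh frame of the render texture is needed:
renderTexture.Update();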