本文整理汇总了C#中RenderContext.IsHdrEnabled方法的典型用法代码示例。如果您正苦于以下问题:C# RenderContext.IsHdrEnabled方法的具体用法?C# RenderContext.IsHdrEnabled怎么用?C# RenderContext.IsHdrEnabled使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类RenderContext
的用法示例。
在下文中一共展示了RenderContext.IsHdrEnabled方法的11个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: Render
/// <summary>
/// Deferred light pass for projector lights: each visible <c>LightNode</c> holding a
/// <c>ProjectorLight</c> is additively blended into the light buffer, using G-buffer 0/1
/// to reconstruct scene geometry.
/// </summary>
/// <remarks>
/// NOTE(review): this listing is truncated by the source page — the end of the per-light
/// loop (clip submesh draw, light pass, render-state restore) is not visible here.
/// </remarks>
/// <param name="nodes">The scene nodes; entries that are not projector-light nodes are skipped.</param>
/// <param name="context">The render context; must provide a camera and G-buffers.</param>
/// <param name="order">The render order (not used in the visible portion).</param>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (numberOfNodes == 0)
        return;

    context.Validate(_effect);
    context.ThrowIfCameraMissing();

    var graphicsDevice = _effect.GraphicsDevice;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);
    // Light contributions are accumulated additively; depth test and culling are disabled.
    graphicsDevice.DepthStencilState = DepthStencilState.None;
    graphicsDevice.RasterizerState = RasterizerState.CullNone;
    graphicsDevice.BlendState = GraphicsHelper.BlendStateAdd;

    var viewport = graphicsDevice.Viewport;
    _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));
    _parameterGBuffer0.SetValue(context.GBuffer0);
    _parameterGBuffer1.SetValue(context.GBuffer1);

    var cameraNode = context.CameraNode;
    var cameraPose = cameraNode.PoseWorld;
    Matrix viewProjection = (Matrix)cameraNode.View * cameraNode.Camera.Projection;

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    var isHdrEnabled = context.IsHdrEnabled();
    for (int i = 0; i < numberOfNodes; i++)
    {
        var lightNode = nodes[i] as LightNode;
        if (lightNode == null)
            continue;
        var light = lightNode.Light as ProjectorLight;
        if (light == null)
            continue;

        // LightNode is visible in current frame.
        lightNode.LastFrame = frame;

        // In HDR mode the light color is additionally scaled by the light's HdrScale.
        float hdrScale = isHdrEnabled ? light.HdrScale : 1;
        _parameterDiffuseColor.SetValue((Vector3)light.Color * light.DiffuseIntensity * hdrScale);
        _parameterSpecularColor.SetValue((Vector3)light.Color * light.SpecularIntensity * hdrScale);
        _parameterTexture.SetValue(light.Texture);

        var lightPose = lightNode.PoseWorld;
        // The light position is passed camera-relative (camera position subtracted).
        _parameterPosition.SetValue((Vector3)(lightPose.Position - cameraPose.Position));
        _parameterRange.SetValue(light.Projection.Far);
        _parameterAttenuation.SetValue(light.Attenuation);
        _parameterTextureMatrix.SetValue((Matrix)(GraphicsHelper.ProjectorBiasMatrix * light.Projection * (lightPose.Inverse * new Pose(cameraPose.Position))));

        // Frustum far corners for the light's screen-space rectangle — used by the shader
        // to reconstruct world-space positions from the G-buffer.
        var rectangle = GraphicsHelper.GetViewportRectangle(cameraNode, viewport, lightNode);
        var texCoordTopLeft = new Vector2F(rectangle.Left / (float)viewport.Width, rectangle.Top / (float)viewport.Height);
        var texCoordBottomRight = new Vector2F(rectangle.Right / (float)viewport.Width, rectangle.Bottom / (float)viewport.Height);
        GraphicsHelper.GetFrustumFarCorners(cameraNode.Camera.Projection, texCoordTopLeft, texCoordBottomRight, _frustumFarCorners);

        // Convert frustum far corners from view space to world space.
        for (int j = 0; j < _frustumFarCorners.Length; j++)
            _frustumFarCorners[j] = (Vector3)cameraPose.ToWorldDirection((Vector3F)_frustumFarCorners[j]);
        _parameterFrustumCorners.SetValue(_frustumFarCorners);

        bool hasShadow = (lightNode.Shadow != null && lightNode.Shadow.ShadowMask != null);
        if (hasShadow)
        {
            // Select which channel (R/G/B/A) of the shadow mask this light's shadow occupies.
            switch (lightNode.Shadow.ShadowMaskChannel)
            {
                case 0: _parameterShadowMaskChannel.SetValue(new Vector4(1, 0, 0, 0)); break;
                case 1: _parameterShadowMaskChannel.SetValue(new Vector4(0, 1, 0, 0)); break;
                case 2: _parameterShadowMaskChannel.SetValue(new Vector4(0, 0, 1, 0)); break;
                default: _parameterShadowMaskChannel.SetValue(new Vector4(0, 0, 0, 1)); break;
            }
            _parameterShadowMask.SetValue(lightNode.Shadow.ShadowMask);
        }

        if (lightNode.Clip != null)
        {
            // Clip geometry is rendered into the stencil buffer first (no color writes).
            var data = lightNode.RenderData as LightRenderData;
            if (data == null)
            {
                data = new LightRenderData();
                lightNode.RenderData = data;
            }
            data.UpdateClipSubmesh(context.GraphicsService, lightNode);
            graphicsDevice.DepthStencilState = GraphicsHelper.DepthStencilStateOnePassStencilFail;
            graphicsDevice.BlendState = GraphicsHelper.BlendStateNoColorWrite;
            _parameterWorldViewProjection.SetValue((Matrix)data.ClipMatrix * viewProjection);
            _passClip.Apply();
            //......... remainder of this example omitted in the source listing .........
示例2: Render
//......... beginning of this example omitted in the source listing .........
// NOTE(review): this excerpt starts mid-method. The declarations of `node`,
// `graphicsDevice`, `skipQuery`, `viewportHeight`, `sunDirection`, `projection`,
// `cameraNode` and `context` are outside the visible range — confirm against the
// full source before relying on their semantics.
            if (node.OcclusionQuery.IsComplete)
            {
                node.TryUpdateSunOcclusion();
            }
            else
            {
                // The previous query is still not finished. Do not start a new query, this would
                // create a SharpDX warning.
                skipQuery = true;
            }
        }
        else
        {
            node.OcclusionQuery = new OcclusionQuery(graphicsDevice);
        }

        if (!skipQuery)
        {
            node.IsQueryPending = true;

            float totalPixels = viewportHeight * node.SunQuerySize;
            totalPixels *= totalPixels;
            node.QuerySize = totalPixels;

            // Use a camera which looks at the sun.
            // Get a relative up vector which is not parallel to the forward direction.
            var lookAtUp = Vector3F.UnitY;
            if (Vector3F.AreNumericallyEqual(sunDirection, lookAtUp))
                lookAtUp = Vector3F.UnitZ;

            // Build an orthonormal look-at-sun view basis (right/up/backward columns).
            Vector3F zAxis = -sunDirection;
            Vector3F xAxis = Vector3F.Cross(lookAtUp, zAxis).Normalized;
            Vector3F yAxis = Vector3F.Cross(zAxis, xAxis);

            var lookAtSunView = new Matrix(xAxis.X, yAxis.X, zAxis.X, 0,
                                           xAxis.Y, yAxis.Y, zAxis.Y, 0,
                                           xAxis.Z, yAxis.Z, zAxis.Z, 0,
                                           0, 0, 0, 1);
            _parameterView.SetValue(lookAtSunView);

            // Occlusion query pass: depth/color writes disabled, only pixel counting.
            graphicsDevice.BlendState = GraphicsHelper.BlendStateNoColorWrite;
            graphicsDevice.DepthStencilState = DepthStencilState.None;
            graphicsDevice.RasterizerState = RasterizerState.CullNone;

            // Create small quad shortly behind the near plane.
            // Note: We use an "untranslated" view matrix, so we can ignore the camera position.
            float width = (projection.Top - projection.Bottom) * node.SunQuerySize;
            Vector3F right = sunDirection.Orthonormal1 * (width / 2);
            Vector3F up = sunDirection.Orthonormal2 * (width / 2);
            Vector3F center = sunDirection * (projection.Near * 1.0001f);
            _queryGeometry[0] = center - up - right;
            _queryGeometry[1] = center + up - right;
            _queryGeometry[2] = center - up + right;
            _queryGeometry[3] = center + up + right;

            if (node.CloudMap.Texture.Format == SurfaceFormat.Alpha8)
                _passOcclusionAlpha.Apply();
            else
                _passOcclusionRgb.Apply();

            node.OcclusionQuery.Begin();
            graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _queryGeometry, 0, 2,
                                              VertexPosition.VertexDeclaration);
            node.OcclusionQuery.End();
        }
    }
    else
    {
        node.IsQueryPending = false;
        node.SunOcclusion = 0;
    }

    // Untranslated view matrix (rotation only) — sky geometry ignores camera position.
    Matrix viewUntranslated = (Matrix)new Matrix44F(cameraNode.PoseWorld.Orientation.Transposed, new Vector3F(0));
    _parameterView.SetValue(viewUntranslated);

    // Render clouds.
    graphicsDevice.BlendState = BlendState.AlphaBlend;
    graphicsDevice.RasterizerState = RasterizerState.CullNone;
    graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;

    // HDR targets use the linear pass; LDR targets use the gamma-corrected pass.
    if (context.IsHdrEnabled())
    {
        if (node.CloudMap.Texture.Format == SurfaceFormat.Alpha8)
            _passCloudAlphaLinear.Apply();
        else
            _passCloudRgbLinear.Apply();
    }
    else
    {
        if (node.CloudMap.Texture.Format == SurfaceFormat.Alpha8)
            _passCloudAlphaGamma.Apply();
        else
            _passCloudRgbGamma.Apply();
    }

    _submesh.Draw();
    }
    savedRenderState.Restore();
}
示例3: Render
/// <summary>
/// Renders the given <c>GradientTextureSkyNode</c>s as sky geometry blended over the
/// current render target, optionally applying the CIE sky luminance distribution.
/// </summary>
/// <param name="nodes">The scene nodes; entries that are not gradient-texture sky nodes are skipped.</param>
/// <param name="context">The render context; must provide a camera node.</param>
/// <param name="order">The render order (not used by this renderer).</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="nodes"/> or <paramref name="context"/> is <see langword="null"/>.
/// </exception>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    ThrowIfDisposed();

    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (numberOfNodes == 0)   // FIX: test the cached count (was "nodes.Count == 0") for consistency.
        return;

    context.Validate(_effect);
    context.ThrowIfCameraMissing();

    var graphicsDevice = context.GraphicsService.GraphicsDevice;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);
    graphicsDevice.BlendState = BlendState.AlphaBlend;
    graphicsDevice.RasterizerState = RasterizerState.CullNone;
    graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;

    // Camera properties. The view matrix is rotation-only — the sky is rendered at infinity,
    // so the camera translation is ignored.
    var cameraNode = context.CameraNode;
    Matrix view = (Matrix)new Matrix44F(cameraNode.PoseWorld.Orientation.Transposed, new Vector3F());
    _parameterView.SetValue(view);
    Matrix projection = cameraNode.Camera.Projection;
    _parameterProjection.SetValue(projection);

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    for (int i = 0; i < numberOfNodes; i++)
    {
        var node = nodes[i] as GradientTextureSkyNode;
        if (node == null)
            continue;

        // GradientTextureSkyNode is visible in current frame.
        node.LastFrame = frame;

        _parameterSunDirection.SetValue((Vector3)node.SunDirection);
        _parameterTime.SetValue((float)node.TimeOfDay.TotalHours / 24);
        _parameterColor.SetValue((Vector4)node.Color);
        _parameterFrontTexture.SetValue(node.FrontTexture);
        _parameterBackTexture.SetValue(node.BackTexture);

        if (node.CieSkyStrength < Numeric.EpsilonF)
        {
            // CIE sky model disabled — plain gradient lookup.
            if (context.IsHdrEnabled())
                _passLinear.Apply();
            else
                _passGamma.Apply();
        }
        else
        {
            // CIE sky model enabled — pass the A–E distribution coefficients.
            var p = node.CieSkyParameters;
            _parameterAbcd.SetValue(new Vector4(p.A, p.B, p.C, p.D));
            _parameterEAndStrength.SetValue(new Vector2(p.E, node.CieSkyStrength));
            if (context.IsHdrEnabled())
                _passCieLinear.Apply();
            else
                _passCieGamma.Apply();
        }

        _submesh.Draw();
    }

    savedRenderState.Restore();
}
示例4: Render
/// <summary>
/// Renders starfields: each visible <c>StarfieldNode</c>'s star mesh is blended
/// additively over the current sky (e.g. over cosmos textures).
/// </summary>
/// <param name="nodes">The scene nodes; entries that are not starfield nodes are skipped.</param>
/// <param name="context">The render context; must provide a camera node.</param>
/// <param name="order">The render order (not used by this renderer).</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="nodes"/> or <paramref name="context"/> is <see langword="null"/>.
/// </exception>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    ThrowIfDisposed();

    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (numberOfNodes == 0)   // FIX: test the cached count (was "nodes.Count == 0") for consistency.
        return;

    context.Validate(_effect);
    context.ThrowIfCameraMissing();

    var graphicsDevice = context.GraphicsService.GraphicsDevice;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);

    // Camera properties
    var cameraNode = context.CameraNode;
    Matrix view = (Matrix)cameraNode.View;
    Matrix projection = cameraNode.Camera.Projection;
    Matrix viewProjection = view * projection;

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    // Blend additively over any cosmos textures.
    graphicsDevice.RasterizerState = RasterizerState.CullNone;
    graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;
    graphicsDevice.BlendState = BlendState.Additive;

    _effectParameterViewportSize.SetValue(new Vector2(context.Viewport.Width, context.Viewport.Height));

    for (int i = 0; i < numberOfNodes; i++)
    {
        var node = nodes[i] as StarfieldNode;
        if (node == null)
            continue;

        // StarfieldNode is visible in current frame. (FIX: comment previously said "SkyboxNode".)
        node.LastFrame = frame;

        if (node.Stars != null && node.Stars.Count > 0)
        {
            // Stars rotate with the node but use no translation (rendered at infinity).
            Matrix world = (Matrix)new Matrix44F(node.PoseWorld.Orientation, Vector3F.Zero);
            _effectParameterWorldViewProjection.SetValue(world * viewProjection);

            // In [ZFX] the star luminance of the precomputed star data is scaled with
            //   float const viewFactor = tan(fov);
            //   float const resolutionFactor = resolution / 1920.0f;
            //   float const luminanceScale = 1.0f / (viewFactor * viewFactor) * (resolutionFactor * resolutionFactor);
            // We ignore this here, but we could add this factor to the Intensity parameter.
            _effectParameterIntensity.SetValue((Vector3)node.Color);

            if (context.IsHdrEnabled())
                _effectPassLinear.Apply();
            else
                _effectPassGamma.Apply();

            var mesh = GetStarfieldMesh(node, context);
            mesh.Draw();
        }
    }

    savedRenderState.Restore();
}
示例5: RenderHiDef
/// <summary>
/// Renders a skybox cube map (HiDef profile), converting from the texture's color
/// encoding (RGB, sRGB or RGBM) into the target space — linear when HDR is enabled,
/// sRGB otherwise.
/// </summary>
/// <param name="node">The skybox node whose texture, color and pose are rendered.</param>
/// <param name="context">The render context; must provide a camera node.</param>
/// <exception cref="GraphicsException">
/// The texture is sRGB-encoded but stored in a floating-point format.
/// </exception>
/// <exception cref="NotSupportedException">The node uses an unsupported color encoding.</exception>
private void RenderHiDef(SkyboxNode node, RenderContext context)
{
    var device = context.GraphicsService.GraphicsDevice;
    var previousRenderState = new RenderStateSnapshot(device);

    device.RasterizerState = RasterizerState.CullNone;
    device.DepthStencilState = DepthStencilState.DepthRead;
    device.BlendState = node.EnableAlphaBlending ? BlendState.AlphaBlend : BlendState.Opaque;

    // Floating-point textures cannot use linear filtering (XNA would throw an exception),
    // so fall back to point sampling for those.
    bool isFloatingPointSource = TextureHelper.IsFloatingPointFormat(node.Texture.Format);
    device.SamplerStates[0] = isFloatingPointSource ? SamplerState.PointClamp : SamplerState.LinearClamp;

    var camera = context.CameraNode;
    Matrix44F viewMatrix = camera.View;
    Matrix44F projectionMatrix = camera.Camera.Projection;

    // Cube maps are left-handed --> sample with inverted z. (Otherwise, the cube map
    // and objects or texts in it are mirrored.)
    var invertZ = Matrix44F.CreateScale(1, 1, -1);
    Matrix33F rotation = node.PoseWorld.Orientation;
    _parameterWorldViewProjection.SetValue(
        (Matrix)(projectionMatrix * viewMatrix * new Matrix44F(rotation, Vector3F.Zero) * invertZ));

    Vector4 tintColor;
    if (node.EnableAlphaBlending)
        tintColor = new Vector4((Vector3)node.Color * node.Alpha, node.Alpha); // Premultiplied alpha.
    else
        tintColor = new Vector4((Vector3)node.Color, 1);                       // Opaque.
    _parameterColor.SetValue(tintColor);
    _textureParameter.SetValue(node.Texture);

    // Pick the shader pass that decodes the source encoding into the target space.
    var encoding = node.Encoding;
    if (encoding is RgbEncoding)
    {
        _parameterTextureSize.SetValue(node.Texture.Size);
        if (context.IsHdrEnabled())
            _passRgbToRgb.Apply();
        else
            _passRgbToSRgb.Apply();
    }
    else if (encoding is SRgbEncoding)
    {
        if (isFloatingPointSource)
            throw new GraphicsException("sRGB encoded skybox cube maps must not use a floating point format.");

        if (context.IsHdrEnabled())
            _passSRgbToRgb.Apply();
        else
            _passSRgbToSRgb.Apply();
    }
    else if (encoding is RgbmEncoding)
    {
        float rgbmMax = GraphicsHelper.ToGamma(((RgbmEncoding)encoding).Max);
        _parameterRgbmMaxValue.SetValue(rgbmMax);
        if (context.IsHdrEnabled())
            _passRgbmToRgb.Apply();
        else
            _passRgbmToSRgb.Apply();
    }
    else
    {
        throw new NotSupportedException("The SkyBoxRenderer supports only RgbEncoding, SRgbEncoding and RgbmEncoding.");
    }

    _submesh.Draw();
    previousRenderState.Restore();
}
示例6: Render
/// <inheritdoc/>
/// <remarks>
/// Renders <c>ScatteringSkyNode</c>s: sets atmosphere/planet radii, Rayleigh/Mie
/// scattering coefficients and the sun parameters, then draws the sky submesh with a
/// linear (HDR) or gamma (LDR) pass, optionally adding a horizon/zenith base color.
/// </remarks>
/// <exception cref="ArgumentNullException">
/// <paramref name="nodes"/> or <paramref name="context"/> is <see langword="null"/>.
/// </exception>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    ThrowIfDisposed();

    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (numberOfNodes == 0)   // FIX: test the cached count (was "nodes.Count == 0") for consistency.
        return;

    context.Validate(_effect);
    context.ThrowIfCameraMissing();

    var graphicsDevice = context.GraphicsService.GraphicsDevice;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);
    graphicsDevice.BlendState = BlendState.AlphaBlend;
    graphicsDevice.RasterizerState = RasterizerState.CullNone;
    graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;

    // Camera properties. Rotation-only view matrix — the sky ignores camera translation.
    var cameraNode = context.CameraNode;
    Matrix view = (Matrix)new Matrix44F(cameraNode.PoseWorld.Orientation.Transposed, new Vector3F());
    _parameterView.SetValue(view);
    Matrix projection = cameraNode.Camera.Projection;
    _parameterProjection.SetValue(projection);

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    for (int i = 0; i < numberOfNodes; i++)
    {
        var node = nodes[i] as ScatteringSkyNode;
        if (node == null)
            continue;

        // ScatteringSkyNode is visible in current frame.
        node.LastFrame = frame;

        _parameterSunDirection.SetValue((Vector3)node.SunDirection);
        _parameterSunIntensity.SetValue((Vector3)(node.SunIntensity * node.SunColor));
        _parameterRadii.SetValue(new Vector4(
            node.AtmosphereHeight + node.PlanetRadius,  // Atmosphere radius
            node.PlanetRadius,                          // Ground radius
            node.ObserverAltitude + node.PlanetRadius,  // Observer radius
            node.ScaleHeight));                         // Absolute Scale height
        _parameterNumberOfSamples.SetValue(node.NumberOfSamples);
        _parameterBetaRayleigh.SetValue((Vector3)node.BetaRayleigh);
        _parameterBetaMie.SetValue((Vector3)node.BetaMie);
        _parameterGMie.SetValue(node.GMie);
        _parameterTransmittance.SetValue(node.Transmittance);

        if (node.BaseHorizonColor.IsNumericallyZero && node.BaseZenithColor.IsNumericallyZero)
        {
            // No base color.
            if (context.IsHdrEnabled())
                _passLinear.Apply();
            else
                _passGamma.Apply();
        }
        else
        {
            // Add base color.
            _parameterBaseHorizonColor.SetValue((Vector4)new Vector4F(node.BaseHorizonColor, node.BaseColorShift));
            _parameterBaseZenithColor.SetValue((Vector3)node.BaseZenithColor);
            if (context.IsHdrEnabled())
                _passLinearWithBaseColor.Apply();
            else
                _passGammaWithBaseColor.Apply();
        }

        _submesh.Draw();
    }

    savedRenderState.Restore();
}
示例7: Render
/// <summary>
/// Deferred light pass for ambient lights: each visible <c>LightNode</c> holding an
/// <c>AmbientLight</c> is additively accumulated into the light buffer via a
/// full-screen quad, with optional stencil-based clip volumes.
/// </summary>
/// <param name="nodes">The scene nodes; entries that are not ambient-light nodes are skipped.</param>
/// <param name="context">The render context; must provide a camera and G-buffers.</param>
/// <param name="order">The render order (not used by this renderer).</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="nodes"/> or <paramref name="context"/> is <see langword="null"/>.
/// </exception>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int count = nodes.Count;
    if (count == 0)
        return;

    context.Validate(_effect);
    context.ThrowIfCameraMissing();

    var device = _effect.GraphicsDevice;
    var previousRenderState = new RenderStateSnapshot(device);
    // Light contributions are accumulated additively; depth test and culling are disabled.
    device.DepthStencilState = DepthStencilState.None;
    device.RasterizerState = RasterizerState.CullNone;
    device.BlendState = GraphicsHelper.BlendStateAdd;

    var viewport = device.Viewport;
    _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));
    _parameterGBuffer0.SetValue(context.GBuffer0);
    _parameterGBuffer1.SetValue(context.GBuffer1);

    var camera = context.CameraNode;
    Matrix viewProj = (Matrix)camera.View * camera.Camera.Projection;

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    camera.LastFrame = frame;

    bool hdr = context.IsHdrEnabled();

    for (int index = 0; index < count; index++)
    {
        var node = nodes[index] as LightNode;
        if (node == null)
            continue;
        var ambientLight = node.Light as AmbientLight;
        if (ambientLight == null)
            continue;

        // LightNode is visible in current frame.
        node.LastFrame = frame;

        // In HDR mode the light color is additionally scaled by the light's HdrScale.
        float intensityScale = hdr ? ambientLight.HdrScale : 1;
        _parameterLightColor.SetValue((Vector3)ambientLight.Color * ambientLight.Intensity * intensityScale);
        _parameterHemisphericAttenuation.SetValue(ambientLight.HemisphericAttenuation);

        Vector3F worldUp = node.PoseWorld.ToWorldDirection(Vector3F.Up);
        _parameterUp.SetValue((Vector3)worldUp);

        if (node.Clip == null)
        {
            device.DepthStencilState = DepthStencilState.None;
        }
        else
        {
            // Render the clip volume into the stencil buffer first (no color writes),
            // then restrict the light pass to the stenciled region.
            var clipData = node.RenderData as LightRenderData;
            if (clipData == null)
            {
                clipData = new LightRenderData();
                node.RenderData = clipData;
            }
            clipData.UpdateClipSubmesh(context.GraphicsService, node);

            device.DepthStencilState = GraphicsHelper.DepthStencilStateOnePassStencilFail;
            device.BlendState = GraphicsHelper.BlendStateNoColorWrite;
            _parameterWorldViewProjection.SetValue((Matrix)clipData.ClipMatrix * viewProj);
            _passClip.Apply();
            clipData.ClipSubmesh.Draw();

            device.DepthStencilState = node.InvertClip
                ? GraphicsHelper.DepthStencilStateStencilEqual0
                : GraphicsHelper.DepthStencilStateStencilNotEqual0;
            device.BlendState = GraphicsHelper.BlendStateAdd;
        }

        _passLight.Apply();
        device.DrawFullScreenQuad();
    }

    previousRenderState.Restore();
}
示例8: RenderHiDef
/// <summary>
/// Renders a cube map as sky background (HiDef profile) with a uniform exposure factor,
/// using the linear pass when HDR is enabled and the gamma pass otherwise.
/// </summary>
/// <param name="texture">The cube map to render.</param>
/// <param name="orientation">The world-space orientation of the cube map.</param>
/// <param name="exposure">Exposure factor applied to the RGB channels.</param>
/// <param name="context">The render context; must provide a camera node.</param>
private void RenderHiDef(TextureCube texture, Matrix33F orientation, float exposure, RenderContext context)
{
    var device = context.GraphicsService.GraphicsDevice;
    var previousRenderState = new RenderStateSnapshot(device);

    device.RasterizerState = RasterizerState.CullNone;
    device.DepthStencilState = DepthStencilState.DepthRead;
    device.BlendState = BlendState.Opaque;

    var camera = context.CameraNode;
    Matrix44F viewMatrix = camera.View;
    Matrix44F projectionMatrix = camera.Camera.Projection;

    // Cube maps are left-handed --> sample with inverted z. (Otherwise, the cube map
    // and objects or texts in it are mirrored.)
    var invertZ = Matrix44F.CreateScale(1, 1, -1);
    _parameterWorldViewProjection.SetValue(
        (Matrix)(projectionMatrix * viewMatrix * new Matrix44F(orientation, Vector3F.Zero) * invertZ));
    _parameterExposure.SetValue(new Vector4(exposure, exposure, exposure, 1));
    _textureParameter.SetValue(texture);

    var pass = context.IsHdrEnabled() ? _passLinear : _passGamma;
    pass.Apply();

    _submesh.Draw();
    previousRenderState.Restore();
}
示例9: Render
/// <summary>
/// Deferred light pass for directional lights: each visible <c>LightNode</c> holding a
/// <c>DirectionalLight</c> is additively blended into the light buffer, with optional
/// shadow mask and optional projected texture.
/// </summary>
/// <remarks>
/// NOTE(review): this listing is truncated by the source page — the clip handling, the
/// light pass and the render-state restore at the end of the method are not visible here.
/// </remarks>
/// <param name="nodes">The scene nodes; entries that are not directional-light nodes are skipped.</param>
/// <param name="context">The render context; must provide a camera and G-buffers.</param>
/// <param name="order">The render order (not used in the visible portion).</param>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (numberOfNodes == 0)
        return;

    context.Validate(_effect);
    context.ThrowIfCameraMissing();

    var graphicsDevice = _effect.GraphicsDevice;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);
    // Light contributions are accumulated additively; depth test and culling are disabled.
    graphicsDevice.DepthStencilState = DepthStencilState.None;
    graphicsDevice.RasterizerState = RasterizerState.CullNone;
    graphicsDevice.BlendState = GraphicsHelper.BlendStateAdd;

    var viewport = graphicsDevice.Viewport;
    _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));
    _parameterGBuffer0.SetValue(context.GBuffer0);
    _parameterGBuffer1.SetValue(context.GBuffer1);

    var cameraNode = context.CameraNode;
    Matrix viewProjection = (Matrix)cameraNode.View * cameraNode.Camera.Projection;
    var cameraPose = cameraNode.PoseWorld;
    // Frustum far corners are used by the shader to reconstruct world positions from the G-buffer.
    GraphicsHelper.GetFrustumFarCorners(cameraNode.Camera.Projection, _cameraFrustumFarCorners);

    // Convert frustum far corners from view space to world space.
    for (int i = 0; i < _cameraFrustumFarCorners.Length; i++)
        _cameraFrustumFarCorners[i] = (Vector3)cameraPose.ToWorldDirection((Vector3F)_cameraFrustumFarCorners[i]);
    _parameterFrustumCorners.SetValue(_cameraFrustumFarCorners);

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    var isHdrEnabled = context.IsHdrEnabled();
    for (int i = 0; i < numberOfNodes; i++)
    {
        var lightNode = nodes[i] as LightNode;
        if (lightNode == null)
            continue;
        var light = lightNode.Light as DirectionalLight;
        if (light == null)
            continue;

        // LightNode is visible in current frame.
        lightNode.LastFrame = frame;

        // In HDR mode the light color is additionally scaled by the light's HdrScale.
        float hdrScale = isHdrEnabled ? light.HdrScale : 1;
        _parameterDiffuseColor.SetValue((Vector3)light.Color * light.DiffuseIntensity * hdrScale);
        _parameterSpecularColor.SetValue((Vector3)light.Color * light.SpecularIntensity * hdrScale);

        Pose lightPose = lightNode.PoseWorld;
        Vector3F lightDirectionWorld = lightPose.ToWorldDirection(Vector3F.Forward);
        _parameterLightDirection.SetValue((Vector3)lightDirectionWorld);

        bool hasShadow = (lightNode.Shadow != null && lightNode.Shadow.ShadowMask != null);
        if (hasShadow)
        {
            // Select which channel (R/G/B/A) of the shadow mask this light's shadow occupies.
            switch (lightNode.Shadow.ShadowMaskChannel)
            {
                case 0: _parameterShadowMaskChannel.SetValue(new Vector4(1, 0, 0, 0)); break;
                case 1: _parameterShadowMaskChannel.SetValue(new Vector4(0, 1, 0, 0)); break;
                case 2: _parameterShadowMaskChannel.SetValue(new Vector4(0, 0, 1, 0)); break;
                default: _parameterShadowMaskChannel.SetValue(new Vector4(0, 0, 0, 1)); break;
            }
            _parameterShadowMask.SetValue(lightNode.Shadow.ShadowMask);
        }

        bool hasTexture = (light.Texture != null);
        if (hasTexture)
        {
            // Project the light texture along the light direction; the offset/scale map
            // world space into texture space. Near/far planes are irrelevant for an
            // orthographic projection used only as a texture matrix.
            var textureProjection = Matrix44F.CreateOrthographicOffCenter(
                -light.TextureOffset.X,
                -light.TextureOffset.X + Math.Abs(light.TextureScale.X),
                light.TextureOffset.Y,
                light.TextureOffset.Y + Math.Abs(light.TextureScale.Y),
                1,  // Not relevant
                2); // Not relevant.
            var scale = Matrix44F.CreateScale(Math.Sign(light.TextureScale.X), Math.Sign(light.TextureScale.Y), 1);
            _parameterTextureMatrix.SetValue((Matrix)(GraphicsHelper.ProjectorBiasMatrix * scale * textureProjection * lightPose.Inverse));
            _parameterTexture.SetValue(light.Texture);
        }

        if (lightNode.Clip != null)
        {
            var data = lightNode.RenderData as LightRenderData;
            if (data == null)
            {
                //......... remainder of this example omitted in the source listing .........
示例10: Render
/// <summary>
/// Renders lens flares with a sprite batch. On the HiDef profile, flare size and
/// intensity come from per-node occlusion-query results; on Reach a fixed query result
/// of 0/0 is used.
/// </summary>
/// <remarks>
/// NOTE(review): this listing is truncated by the source page — the screen-space
/// projection of the flare position and the actual sprite drawing are not visible here.
/// </remarks>
/// <param name="nodes">The scene nodes; entries that are not lens-flare nodes are skipped.</param>
/// <param name="context">The render context; must provide a camera node.</param>
/// <param name="order">The render order (not used in the visible portion).</param>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    ThrowIfDisposed();

    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    // Lens flares are used sparsely in most games. --> Early out, if possible.
    int numberOfNodes = nodes.Count;
    if (nodes.Count == 0)
        return;

    context.Validate(_spriteBatch);
    context.ThrowIfCameraMissing();

    var graphicsDevice = context.GraphicsService.GraphicsDevice;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);
    bool hiDef = (graphicsDevice.GraphicsProfile == GraphicsProfile.HiDef);

    // Camera properties
    var cameraNode = context.CameraNode;
    var cameraPose = cameraNode.PoseWorld;
    Vector3F cameraForward = -cameraPose.Orientation.GetColumn(2); // 3rd column vector (negated)
    Matrix44F view = cameraNode.View;
    Matrix44F projection = cameraNode.Camera.Projection;

    // The flares are positioned on a line from the origin through the center of
    // the screen.
    var viewport = graphicsDevice.Viewport;
    Vector2F screenCenter = new Vector2F(viewport.Width / 2.0f, viewport.Height / 2.0f);

    if (_transformParameter != null)
    {
        // ----- Original:
        // Matrix matrix = (Matrix)(Matrix44F.CreateOrthographicOffCenter(0, viewport.Width, viewport.Height, 0, 0, 1)
        //   * Matrix44F.CreateTranslation(-0.5f, -0.5f, 0)); // Half-pixel offset (only for Direct3D 9).
        // ----- Inlined:
        Matrix matrix = new Matrix();
        float oneOverW = 1.0f / viewport.Width;
        float oneOverH = 1.0f / viewport.Height;
        matrix.M11 = oneOverW * 2f;
        matrix.M22 = -oneOverH * 2f;
        matrix.M33 = -1f;
        matrix.M44 = 1f;
#if MONOGAME
        matrix.M41 = -1f;
        matrix.M42 = 1f;
#else
        // Direct3D 9: half-pixel offset
        matrix.M41 = -oneOverW - 1f;
        matrix.M42 = oneOverH + 1f;
#endif
        _transformParameter.SetValue(matrix);
    }

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    // Choose current effect technique: Linear vs. Gamma-corrected Writes.
    if (_effect != null)
        _effect.CurrentTechnique = context.IsHdrEnabled() ? _techniqueLinear : _techniqueGamma;

    _spriteBatch.Begin(SpriteSortMode.Texture, BlendState.Additive, null, null, null, _effect);

    for (int i = 0; i < numberOfNodes; i++)
    {
        var node = nodes[i] as LensFlareNode;
        if (node == null)
            continue;

        var lensFlare = node.LensFlare;

        float size, intensity;
        if (hiDef)
        {
            // HiDef profile: size/intensity are derived from the occlusion-query result
            // stored as view-dependent data; nodes without results (or fully occluded)
            // are skipped.
            object dummy;
            cameraNode.ViewDependentData.TryGetValue(node, out dummy);
            var renderData = dummy as OcclusionData;
            if (renderData == null || renderData.VisiblePixels == 0)
                continue;

            lensFlare.OnGetSizeAndIntensity(node, context, renderData.VisiblePixels, renderData.TotalPixels, out size, out intensity);
        }
        else
        {
            // Reach profile: no occlusion queries available.
            lensFlare.OnGetSizeAndIntensity(node, context, 0, 0, out size, out intensity);
        }

        if (size <= 0 || intensity < MinIntensity)
            continue;

        // LensFlareNode is visible in current frame.
        node.LastFrame = frame;

        // Project position to screen space.
        Vector2F screenPosition;
        //......... remainder of this example omitted in the source listing .........
示例11: Render
/// <summary>
/// Renders sky objects (e.g. sun/moon billboards): each visible <c>SkyObjectNode</c> is
/// drawn as a camera-independent billboard quad with wrap lighting, followed by its glows.
/// </summary>
/// <remarks>
/// NOTE(review): this listing is truncated by the source page — the glow rendering and
/// the end of the method are not visible here.
/// </remarks>
/// <param name="nodes">The scene nodes; entries that are not sky-object nodes are skipped.</param>
/// <param name="context">The render context; must provide a camera node.</param>
/// <param name="order">The render order (not used in the visible portion).</param>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    ThrowIfDisposed();

    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (nodes.Count == 0)
        return;

    context.Validate(_effect);
    context.ThrowIfCameraMissing();

    var graphicsDevice = context.GraphicsService.GraphicsDevice;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);
    graphicsDevice.RasterizerState = RasterizerState.CullNone;
    graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;
    graphicsDevice.BlendState = BlendState.AlphaBlend;

    // Camera properties. Rotation-only view matrix — sky objects ignore camera translation.
    var cameraNode = context.CameraNode;
    Pose cameraPose = cameraNode.PoseWorld;
    Matrix view = (Matrix)new Matrix44F(cameraPose.Orientation.Transposed, new Vector3F(0));
    Matrix projection = cameraNode.Camera.Projection;
    _effectParameterViewProjection.SetValue(view * projection);

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    for (int i = 0; i < numberOfNodes; i++)
    {
        var node = nodes[i] as SkyObjectNode;
        if (node == null)
            continue;

        // SkyObjectNode is visible in current frame.
        node.LastFrame = frame;

        // Get billboard axes from scene node pose.
        Matrix33F orientation = node.PoseWorld.Orientation;
        Vector3F right = orientation.GetColumn(0);
        Vector3F up = orientation.GetColumn(1);
        Vector3F normal = orientation.GetColumn(2);
        Vector3F forward = -normal;
        _effectParameterNormal.SetValue((Vector3)(normal));

        // ----- Render object texture.
        var texture = node.Texture;
        if (texture != null)
        {
            _effectParameterUp.SetValue((Vector3)(up));
            _effectParameterRight.SetValue((Vector3)(right));
            _effectParameterSunLight.SetValue((Vector3)node.SunLight);
            _effectParameterAmbientLight.SetValue(new Vector4((Vector3)node.AmbientLight, node.Alpha));
            _effectParameterObjectTexture.SetValue(texture.TextureAtlas);
            _effectParameterLightWrapSmoothness.SetValue(new Vector2(node.LightWrap, node.LightSmoothness));
            _effectParameterSunDirection.SetValue((Vector3)node.SunDirection);

            // Quad half extents from the angular diameter of the object as seen from the camera.
            float halfWidthX = (float)Math.Tan(node.AngularDiameter.X / 2);
            float halfWidthY = (float)Math.Tan(node.AngularDiameter.Y / 2);

            // Texture coordinates of packed texture.
            Vector2F texCoordLeftTop = texture.GetTextureCoordinates(new Vector2F(0, 0), 0);
            Vector2F texCoordRightBottom = texture.GetTextureCoordinates(new Vector2F(1, 1), 0);
            float texCoordLeft = texCoordLeftTop.X;
            float texCoordTop = texCoordLeftTop.Y;
            float texCoordRight = texCoordRightBottom.X;
            float texCoordBottom = texCoordRightBottom.Y;

            _effectParameterTextureParameters.SetValue(new Vector4(
                (texCoordLeft + texCoordRight) / 2,
                (texCoordTop + texCoordBottom) / 2,
                1 / ((texCoordRight - texCoordLeft) / 2),  // 1 / half extent
                1 / ((texCoordBottom - texCoordTop) / 2)));

            // Build the billboard quad (triangle strip order: BL, TL, BR, TR).
            _vertices[0].Position = (Vector3)(forward - right * halfWidthX - up * halfWidthY);
            _vertices[0].TextureCoordinate = new Vector2(texCoordLeft, texCoordBottom);
            _vertices[1].Position = (Vector3)(forward - right * halfWidthX + up * halfWidthY);
            _vertices[1].TextureCoordinate = new Vector2(texCoordLeft, texCoordTop);
            _vertices[2].Position = (Vector3)(forward + right * halfWidthX - up * halfWidthY);
            _vertices[2].TextureCoordinate = new Vector2(texCoordRight, texCoordBottom);
            _vertices[3].Position = (Vector3)(forward + right * halfWidthX + up * halfWidthY);
            _vertices[3].TextureCoordinate = new Vector2(texCoordRight, texCoordTop);

            if (context.IsHdrEnabled())
                _effectPassObjectLinear.Apply();
            else
                _effectPassObjectGamma.Apply();

            graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _vertices, 0, 2);
        }

        // ----- Render glows.
        if (node.GlowColor0.LengthSquared > 0 || node.GlowColor1.LengthSquared > 0)
        //......... remainder of this example omitted in the source listing .........