本文整理汇总了C#中UnityEngine.Camera.AddCommandBuffer方法的典型用法代码示例。如果您正苦于以下问题:C# Camera.AddCommandBuffer方法的具体用法?C# Camera.AddCommandBuffer怎么用?C# Camera.AddCommandBuffer使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类UnityEngine.Camera
的用法示例。
在下文中一共展示了Camera.AddCommandBuffer方法的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: OnEnable
// Sets up EXR frame capture: output directory, copy material, an optional
// frame-buffer grab command buffer, optional G-buffer targets, and the
// native EXR context.
void OnEnable()
{
    // Make sure the capture destination exists before any frame is written.
    System.IO.Directory.CreateDirectory(m_output_directory);
    m_cam = GetComponent<Camera>();
    m_quad = FrameCapturerUtils.CreateFullscreenQuad();
    m_mat_copy = new Material(m_sh_copy);
    if (m_cam.targetTexture != null)
    {
        // Rendering into an offscreen target uses a different shader variant.
        m_mat_copy.EnableKeyword("OFFSCREEN");
    }

    if (m_capture_framebuffer)
    {
        int tid = Shader.PropertyToID("_TmpFrameBuffer");
        m_cb = new CommandBuffer();
        m_cb.name = "ExrCapturer: copy frame buffer";
        m_cb.GetTemporaryRT(tid, -1, -1, 0, FilterMode.Point);
        m_cb.Blit(BuiltinRenderTextureType.CurrentActive, tid);
        // The temporary RT is intentionally NOT released; it is reused every frame.
        m_cam.AddCommandBuffer(CameraEvent.AfterEverything, m_cb);
        m_frame_buffer = new RenderTexture(m_cam.pixelWidth, m_cam.pixelHeight, 0, RenderTextureFormat.ARGBHalf);
        m_frame_buffer.wrapMode = TextureWrapMode.Repeat;
        m_frame_buffer.Create();
    }

    // G-buffer capture requires the deferred shading path. The camera is NOT
    // deferred when its own path is not DeferredShading AND it is not
    // UsePlayerSettings with deferred selected in PlayerSettings.
    // BUG FIX: the original joined the two clauses with &&, so the warning
    // never fired for explicitly non-deferred paths such as Forward.
    if (m_capture_gbuffer &&
        m_cam.renderingPath != RenderingPath.DeferredShading &&
        !(m_cam.renderingPath == RenderingPath.UsePlayerSettings && PlayerSettings.renderingPath == RenderingPath.DeferredShading))
    {
        Debug.Log("ExrCapturer: Rendering path must be deferred to use capture_gbuffer mode.");
        m_capture_gbuffer = false;
    }

    if (m_capture_gbuffer)
    {
        // Four MRT color targets mirroring Unity's deferred G-buffer layout.
        m_gbuffer = new RenderTexture[4];
        m_rt_gbuffer = new RenderBuffer[4];
        for (int i = 0; i < m_gbuffer.Length; ++i)
        {
            m_gbuffer[i] = new RenderTexture(m_cam.pixelWidth, m_cam.pixelHeight, 0, RenderTextureFormat.ARGBHalf);
            m_gbuffer[i].filterMode = FilterMode.Point;
            m_gbuffer[i].Create();
            m_rt_gbuffer[i] = m_gbuffer[i].colorBuffer;
        }
        {
            // Depth precision is user-selectable: 16-bit half or 32-bit float.
            RenderTextureFormat format = m_depth_format == DepthFormat.Half ? RenderTextureFormat.RHalf : RenderTextureFormat.RFloat;
            m_depth = new RenderTexture(m_cam.pixelWidth, m_cam.pixelHeight, 0, format);
            m_depth.filterMode = FilterMode.Point;
            m_depth.Create();
        }
    }

    FrameCapturer.fcExrConfig conf;
    conf.max_active_tasks = m_max_active_tasks;
    m_exr = FrameCapturer.fcExrCreateContext(ref conf);
}
示例2: OnPreCullEvent
// Per-camera pre-cull hook: ensures the Effekseer plugin render command buffer
// is registered on this camera and pushes the current view/projection
// matrices to the native plugin.
void OnPreCullEvent(Camera camera)
{
#if UNITY_EDITOR
    if (Array.IndexOf<Camera>(SceneView.GetAllSceneCameras(), camera) >= 0) {
        // Scene-view cameras render effects only when explicitly enabled.
        if (this.drawInSceneView == false) {
            return;
        }
    } else if (Camera.current.isActiveAndEnabled == false) {
        // Skip editor cameras other than the scene view (e.g. preview cameras).
        // NOTE(review): this tests Camera.current rather than the `camera`
        // parameter — confirm the two always match in this callback.
        return;
    }
#endif
    // Reuse the per-camera render path if one exists; otherwise create and
    // register it. TryGetValue avoids the ContainsKey + indexer double lookup.
    RenderPath path;
    if (!renderPaths.TryGetValue(camera, out path)) {
        path = new RenderPath();
        path.renderId = renderPaths.Count;
        path.cameraEvent = cameraEvent;
        // Command buffer that invokes the native plugin's render callback,
        // tagged with this path's id so the plugin knows which camera it is.
        path.commandBuffer = new CommandBuffer();
        path.commandBuffer.IssuePluginEvent(Plugin.EffekseerGetRenderFunc(), path.renderId);
        camera.AddCommandBuffer(path.cameraEvent, path.commandBuffer);
        renderPaths.Add(camera, path);
    }
    // Push the view-related matrices to the plugin every frame.
    Plugin.EffekseerSetProjectionMatrix(path.renderId, Utility.Matrix2Array(
        GL.GetGPUProjectionMatrix(camera.projectionMatrix, false)));
    Plugin.EffekseerSetCameraMatrix(path.renderId, Utility.Matrix2Array(
        camera.worldToCameraMatrix));
}
示例3: AddCommandBuffersToCamera
// Attaches the skin-shading command buffers (transmission copy, normal
// buffer, deferred release) to the camera, skipping any that are already
// registered, then refreshes shader properties.
private void AddCommandBuffersToCamera(Camera setCamera, CommandBuffer normalBuffer) {
    // The depth-aware upsample needs the camera depth texture.
    setCamera.depthTextureMode |= DepthTextureMode.Depth;

    bool wantsTransmission = m_copyTransmission != null
        && !HasCommandBuffer(setCamera, CameraEvent.AfterGBuffer, c_copyTransmissionBufferName);
    if (wantsTransmission) {
        setCamera.AddCommandBuffer(CameraEvent.AfterGBuffer, m_copyTransmission);
    }

    bool wantsNormals = normalBuffer != null
        && !HasCommandBuffer(setCamera, CameraEvent.BeforeLighting, c_normalBufferName);
    if (wantsNormals) {
        setCamera.AddCommandBuffer(CameraEvent.BeforeLighting, normalBuffer);
    }

    bool wantsRelease = m_releaseDeferredPlus != null
        && !HasCommandBuffer(setCamera, CameraEvent.AfterLighting, c_releaseDeferredBuffer);
    if (wantsRelease) {
        setCamera.AddCommandBuffer(CameraEvent.AfterLighting, m_releaseDeferredPlus);
    }

    RefreshProperties();
}
示例4: OnEnable
//
// Enables the highlighting component: verifies platform support, builds the
// blur material and the render command buffer, and attaches the buffer to
// this GameObject's camera. Initialization order matters here.
protected virtual void OnEnable()
{
    if (!CheckInstance()) { return; }
    Initialize();
    isSupported = CheckSupported();
    if (!isSupported)
    {
        // Unsupported platform: disable the component rather than failing every frame.
        enabled = false;
        Debug.LogWarning("HighlightingSystem : Highlighting System has been disabled due to unsupported Unity features on the current platform!");
        return;
    }
    blurMaterial = new Material(materials[BLUR]);
    // Set initial intensity in blur material
    blurMaterial.SetFloat(ShaderPropertyID._Intensity, _blurIntensity);
    renderBuffer = new CommandBuffer();
    renderBuffer.name = renderBufferName;
    cam = GetComponent<Camera>();
    UpdateHighlightingBuffer();
    // Force-rebuild renderBuffer
    isDirty = true;
    // `queue` is the CameraEvent at which the highlighting pass is injected.
    cam.AddCommandBuffer(queue, renderBuffer);
}
示例5: RefreshComBufs
// Ensures the pre-light and post-light command buffers are attached to the
// given camera exactly once; scene-view cameras that receive buffers are
// remembered in sceneCamsWithBuffer.
public void RefreshComBufs(Camera cam, bool isSceneCam) {
    if (!cam || combufPreLight == null || combufPostLight == null) {
        return;
    }

    // Detect whether our pre-light buffer is already attached. Comparison
    // is by name: per the original author, instance equality does not work
    // here (Unity appears to hand back distinct wrapper objects).
    CommandBuffer[] attached = cam.GetCommandBuffers(CameraEvent.BeforeReflections);
    bool alreadyAttached = false;
    for (int i = 0; i < attached.Length; ++i)
    {
        if (attached[i].name == combufPreLight.name)
        {
            alreadyAttached = true;
            break;
        }
    }

    if (alreadyAttached)
    {
        return;
    }

    cam.AddCommandBuffer(CameraEvent.BeforeReflections, combufPreLight);
    cam.AddCommandBuffer(CameraEvent.AfterLighting, combufPostLight);
    if (isSceneCam)
    {
        sceneCamsWithBuffer.Add(cam);
    }
}
示例6: Start
/// @cond
/// <summary>
/// Initialize the AR Screen: hooks up the Tango service, attaches the AR
/// screen command buffer to the camera, and optionally enables point-cloud
/// occlusion rendering.
/// </summary>
public void Start()
{
    m_camera = GetComponent<Camera>();
    m_arCameraPostProcess = gameObject.GetComponent<ARCameraPostProcess>();

    TangoApplication tangoApplication = FindObjectOfType<TangoApplication>();
    if (tangoApplication != null)
    {
        // BUG FIX: the event subscription originally happened BEFORE the null
        // check, throwing NullReferenceException when no TangoApplication
        // exists in the scene. Subscribe inside the guard instead.
        tangoApplication.OnDisplayChanged += _OnDisplayChanged;
        tangoApplication.Register(this);

        // If already connected to a service, then do initialization now.
        if (tangoApplication.IsServiceConnected)
        {
            OnTangoServiceConnected();
        }

        // Register the AR screen draw for both forward and deferred paths;
        // only the event matching the active rendering path will fire.
        CommandBuffer buf = VideoOverlayProvider.CreateARScreenCommandBuffer();
        m_camera.AddCommandBuffer(CameraEvent.BeforeForwardOpaque, buf);
        m_camera.AddCommandBuffer(CameraEvent.BeforeGBuffer, buf);
    }

    if (m_enableOcclusion)
    {
        TangoPointCloud pointCloud = FindObjectOfType<TangoPointCloud>();
        if (pointCloud != null)
        {
            // Render the depth point cloud with the occlusion shader so real
            // geometry can occlude virtual objects.
            Renderer renderer = pointCloud.GetComponent<Renderer>();
            renderer.enabled = true;
            renderer.material.shader = m_occlusionShader;
            pointCloud.m_updatePointsMesh = true;
        }
        else
        {
            Debug.Log("Point Cloud data is not available, occlusion is not possible.");
        }
    }
}
示例7: OnEnable
//
// Enables the highlighting component: verifies platform support, creates the
// blur/cut/composite materials and the render command buffer, attaches the
// buffer to this camera, and registers with the shared blitter if present.
// Initialization order matters here.
protected virtual void OnEnable()
{
    Initialize();
    if (!CheckSupported(true))
    {
        // Unsupported platform: disable the component rather than failing every frame.
        enabled = false;
        Debug.LogError("HighlightingSystem : Highlighting System has been disabled due to unsupported Unity features on the current platform!");
        return;
    }
    blurMaterial = new Material(materials[BLUR]);
    cutMaterial = new Material(materials[CUT]);
    compMaterial = new Material(materials[COMP]);
    // Set initial intensity in blur material
    blurMaterial.SetFloat(ShaderPropertyID._Intensity, _blurIntensity);
    renderBuffer = new CommandBuffer();
    renderBuffer.name = renderBufferName;
    cam = GetComponent<Camera>();
    // Track this camera in the shared set of highlighted cameras.
    cameras.Add(cam);
    // `queue` is the CameraEvent at which the highlighting pass is injected.
    cam.AddCommandBuffer(queue, renderBuffer);
    if (_blitter != null)
    {
        _blitter.Register(this);
    }
}
示例8: Create
/// <summary>
/// Return a new command buffer.
/// This will be called the first time
/// the mesh is rendered for each camera
/// that renders the ocean.
/// The buffer copies the camera depths into a temporary RT, publishes it as
/// a global texture, and is registered on the camera at `Event`.
/// </summary>
public override CommandBuffer Create(Camera cam)
{
    CommandBuffer cmd = new CommandBuffer();
    cmd.name = "Ceto DepthGrab Cmd: " + cam.name;
    //int width = cam.pixelWidth;
    //int height = cam.pixelHeight;
    //int scale = ResolutionToNumber(Resolution);
    //width /= scale;
    //height /= scale;
    RenderTextureFormat format;
    //screen grab currently disabled.
    /*
    if (cam.hdr)
        format = RenderTextureFormat.ARGBHalf;
    else
        format = RenderTextureFormat.ARGB32;
    //Copy screen into temporary RT.
    int grabID = Shader.PropertyToID("Ceto_GrabCopyTexture");
    cmd.GetTemporaryRT(grabID, width, height, 0, FilterMode.Bilinear, format, RenderTextureReadWrite.Default);
    cmd.Blit(BuiltinRenderTextureType.CurrentActive, grabID);
    cmd.SetGlobalTexture(GrabName, grabID);
    */

    // Prefer full-float depth; fall back to half where unsupported.
    if (SystemInfo.SupportsRenderTextureFormat(RenderTextureFormat.RFloat))
        format = RenderTextureFormat.RFloat;
    else
        format = RenderTextureFormat.RHalf;

    //Copy depths into temporary RT.
    int depthID = Shader.PropertyToID("Ceto_DepthCopyTexture");
    cmd.GetTemporaryRT(depthID, cam.pixelWidth, cam.pixelHeight, 0, FilterMode.Point, format, RenderTextureReadWrite.Linear);
    cmd.Blit(BuiltinRenderTextureType.CurrentActive, depthID, m_copyDepthMat, 0);
    cmd.SetGlobalTexture(DepthName, depthID);
    cam.AddCommandBuffer(Event, cmd);

    // Record the buffer and the resolution it was built for so it can be
    // rebuilt when the camera resizes.
    CommandData data = new CommandData();
    data.command = cmd;
    data.width = cam.pixelWidth;
    data.height = cam.pixelHeight;
    // Indexer assignment inserts or overwrites in one lookup, replacing the
    // original ContainsKey + Remove + Add triple lookup (same semantics).
    m_data[cam] = data;

    return cmd;
}
示例9: OnEnable
// Sets up GIF capture: grabs the camera, fullscreen quad and copy material,
// then registers a command buffer that copies the frame buffer into a
// temporary RT after everything has rendered.
void OnEnable()
{
    m_cam = GetComponent<Camera>();
    m_quad = FrameCapturerUtils.CreateFullscreenQuad();
    m_mat_copy = new Material(m_sh_copy);
    if (m_cam.targetTexture != null)
    {
        // Rendering into an offscreen target uses a different shader variant.
        m_mat_copy.EnableKeyword("OFFSCREEN");
    }

    int tmpBufferId = Shader.PropertyToID("_TmpFrameBuffer");
    m_cb = new CommandBuffer();
    m_cb.name = "GifCapturer: copy frame buffer";
    m_cb.GetTemporaryRT(tmpBufferId, -1, -1, 0, FilterMode.Point);
    m_cb.Blit(BuiltinRenderTextureType.CurrentActive, tmpBufferId);
    // The temporary RT is intentionally NOT released; it is reused every frame.
    m_cam.AddCommandBuffer(CameraEvent.AfterEverything, m_cb);

    ResetRecordingState();
}
示例10: ReconstructLightBuffers
// Rebuilds the per-camera command buffer that draws all custom deferred
// lights as sphere meshes, optionally frustum-culling lights outside the
// camera's view.
void ReconstructLightBuffers(Camera camera, bool toCull)
{
    // Fetch the command buffer bound to this camera, creating and
    // registering one on first use.
    CommandBuffer cmd = null;
    buffers.TryGetValue(camera, out cmd);
    if (cmd == null)
    {
        cmd = new CommandBuffer();
        cmd.name = "Deferred custom lights";
        camera.AddCommandBuffer(CameraEvent.BeforeImageEffectsOpaque, cmd);
        buffers.Add(camera, cmd);
    }
    else
    {
        cmd.Clear();
    }

    var system = PipLightSystem.instance;
    Bounds lightBounds = new Bounds();
    Plane[] cullPlanes = toCull ? GeometryUtility.CalculateFrustumPlanes(camera) : null;

    foreach (var light in system.m_Lights)
    {
        light.UpdateLOD();

        if (toCull)
        {
            // Conservative AABB around the light's sphere of influence.
            lightBounds.center = light.transform.position;
            lightBounds.extents = Vector3.one * light.range;
            if (!GeometryUtility.TestPlanesAABB(cullPlanes, lightBounds))
            {
                continue;
            }
        }

        light.UpdateIfNeeded();
        // Sphere is scaled to the light's diameter (range * 2).
        cmd.DrawMesh(
            lightSphereMesh,
            Matrix4x4.TRS(light.transform.position, Quaternion.identity, Vector3.one * light.range * 2f),
            GetMaterial(light),
            0,
            0,
            light.GetMaterialPropertyBlock());
    }
}