This article collects typical usage examples of the C# method UnityEngine.RenderTexture.Release. If you have been wondering how RenderTexture.Release is used in practice, or are looking for concrete RenderTexture.Release examples, the curated code samples below may help. You can also explore further usage examples of the containing class, UnityEngine.RenderTexture.
A total of 15 code examples of the RenderTexture.Release method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C# code samples.
Example 1: OnRenderImage
void OnRenderImage(RenderTexture source, RenderTexture destination)
{
//Debug.Log ("s" + source.GetInstanceID () + " " + destination.GetInstanceID ());
material.SetFloat("_ValidDepthThreshold", ValidDepthThreshold);
material.SetFloat("_EdgeLuminanceThreshold", EdgeLuminanceThreshold);
material.SetFloat("_SharpEdge", SharpEdge);
//var rt = RenderTexture.GetTemporary (source.width / 2, source.height / 2);
//Graphics.Blit(source, rt2, material, 0);
var rt2 = new RenderTexture (source.width, source.height, 0);
rt2.enableRandomWrite = false;
//rt2.filterMode = FilterMode.Point;
rt2.filterMode = FilterMode.Point;
Graphics.Blit(source, rt2, material, 0);
//var rt3 = new RenderTexture (source.width/2, source.height/2, 0);
//rt3.enableRandomWrite = false;
//rt3.filterMode = FilterMode.Point;
//Graphics.Blit(rt2, rt3, material, 0);
Graphics.Blit(rt2, destination, material, 1);
rt2.Release ();
//rt3.Release ();
}
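A note on Example 1: allocating a RenderTexture with new inside OnRenderImage creates a fresh GPU resource every frame, and Release only frees the hardware memory, not the RenderTexture object itself. A lighter variant of the same two-pass blit, sketched below under the assumption that material exposes the same two passes as above, uses the pooled RenderTexture.GetTemporary / ReleaseTemporary pair that the commented-out line hints at:
// Minimal sketch, assuming `material` exposes the same two passes as Example 1.
void OnRenderImage(RenderTexture source, RenderTexture destination)
{
    // Pooled allocation instead of `new RenderTexture(...)` every frame.
    RenderTexture rt = RenderTexture.GetTemporary(source.width, source.height, 0);
    rt.filterMode = FilterMode.Point;

    Graphics.Blit(source, rt, material, 0);      // pass 0: pre-pass into the temporary
    Graphics.Blit(rt, destination, material, 1); // pass 1: composite to the destination

    // Hand the texture back to the pool; no Release()/Destroy() needed.
    RenderTexture.ReleaseTemporary(rt);
}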
Example 2: CreateRenderTexture
void CreateRenderTexture(Sprite sprite)
{
GameObject.Find("background-1").GetComponent<SpriteRenderer>().sprite = sprite;
int width = (int)GameObject.Find("background-1").GetComponent<Renderer>().bounds.size.x;
int height = (int)GameObject.Find("background-1").GetComponent<Renderer>().bounds.size.y;
Vector3 nsize = Camera.main.WorldToScreenPoint(new Vector3(width, height, 0));
nsize.x *= 2.0f;
//nsize.x *= 1.33f;
nsize.y *= 2.0f;
//nsize.y *= 1.33f;
tex = new RenderTexture((int)nsize.x, (int)nsize.y, 1);
tex.useMipMap = false;
tex.filterMode = FilterMode.Point;
tex.antiAliasing = 1;
tex.Create();
cam = GetComponent<Camera>();
float previousOrthoSize = cam.orthographicSize;
float previousAspect = cam.aspect;
RenderTexture previousRenderTexture = cam.targetTexture;
Texture2D image = new Texture2D((int)nsize.x - 50, (int)nsize.y);
cam.targetTexture = tex;
float scale = 1.0f / (cam.orthographicSize / GameObject.Find("background-1").GetComponent<Renderer>().bounds.size.y / 2.0f);
cam.orthographicSize = scale;
cam.aspect = GameObject.Find("background-1").GetComponent<Renderer>().bounds.size.x / GameObject.Find("background-1").GetComponent<Renderer>().bounds.size.y;
RenderTexture past = RenderTexture.active;
RenderTexture.active = tex;
cam.Render();
image.ReadPixels(new Rect(25, 0, (int)nsize.x - 50, (int)nsize.y), 0, 0);
image.Apply();
cam.targetTexture = previousRenderTexture;
cam.orthographicSize = previousOrthoSize;
cam.aspect = previousAspect;
RenderTexture.active = past;
tex.Release();
enhancedBackgroundImages.Add(Sprite.Create(image, new Rect(0, 0, image.width, image.height), new Vector2(0.5f, 0.5f)));
}
Example 3: UpdateFaceTexture
private void UpdateFaceTexture() {
if(_targetCube == null) return;
if(faceTexture == null || faceTexture.width != _targetCube.width) {
if(faceTexture) Texture2D.DestroyImmediate(faceTexture);
faceTexture = new Texture2D(_targetCube.width, _targetCube.width, TextureFormat.ARGB32, true, false);
//attempt to make an HDR render texture for RGBM capture
RT = RenderTexture.GetTemporary(_targetCube.width, _targetCube.width, 24, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);
RT.Release();
RT.isCubemap = false;
RT.useMipMap = false;
RT.generateMips = false;
RT.Create();
if(!RT.IsCreated() && !RT.Create()) {
Debug.LogWarning("Failed to create HDR RenderTexture, capturing in LDR mode.");
RenderTexture.ReleaseTemporary(RT);
RT = null;
}
}
}
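Example 3 calls Release immediately after GetTemporary because descriptor flags such as useMipMap (and, in old Unity versions, isCubemap/generateMips) can only be changed while the texture is not created; the code releases the hardware resource, edits the flags, re-creates the texture, and falls back to LDR capture if creation fails. A minimal sketch of that pattern, with an assumed helper name, looks like this:
// Sketch of the release-reconfigure-recreate pattern; the helper name is an assumption.
static RenderTexture GetConfiguredTemporary(int size, RenderTextureFormat format)
{
    RenderTexture rt = RenderTexture.GetTemporary(size, size, 24, format,
                                                  RenderTextureReadWrite.Linear);
    // Descriptor flags can only be changed while the texture is not created,
    // so release the hardware resource first, then adjust and re-create.
    rt.Release();
    rt.useMipMap = false;
    rt.Create();

    if (!rt.IsCreated() && !rt.Create())
    {
        // Creation failed (e.g. unsupported format): return the texture to the pool.
        RenderTexture.ReleaseTemporary(rt);
        return null;
    }
    return rt;
}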
Example 4: ReadFromRenderTexture
//This will read the values in tex into the data array. Data must be in the range 0 - 0.9999
static public void ReadFromRenderTexture(RenderTexture tex, int channels, float[] data)
{
if(tex == null)
{
Debug.Log("EncodeFloat::ReadFromRenderTexture - RenderTexture is null");
return;
}
if(data == null)
{
Debug.Log("EncodeFloat::ReadFromRenderTexture - Data is null");
return;
}
if(channels < 1 || channels > 4)
{
Debug.Log("EncodeFloat::ReadFromRenderTexture - Channels must be 1, 2, 3, or 4");
return;
}
if(m_encodeToFloat == null)
{
Shader shader = Shader.Find("EncodeFloat/EncodeToFloat");
if(shader == null)
{
Debug.Log("EncodeFloat::ReadFromRenderTexture - could not find shader EncodeFloat/EncodeToFloat. Did you change the shaders name?");
return;
}
m_encodeToFloat = new Material(shader);
}
int w = tex.width;
int h = tex.height;
RenderTexture encodeTex = new RenderTexture(w, h, 0, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear);
encodeTex.filterMode = FilterMode.Point;
Texture2D readTex = new Texture2D(w, h, TextureFormat.ARGB32, false, true);
Vector4 factor = new Vector4(1.0f, 1.0f/255.0f, 1.0f/65025.0f, 1.0f/160581375.0f);
for(int i = 0; i < channels; i++)
{
//encode data in tex into encodeTex
Graphics.Blit(tex, encodeTex, m_encodeToFloat, i);
//Read the encoded values into a normal texture where we can retrieve them
RenderTexture.active = encodeTex;
readTex.ReadPixels(new Rect(0,0,w,h),0,0);
readTex.Apply();
RenderTexture.active = null;
//decode each pixel in readTex into a single float for the current channel
for(int x = 0; x < w; x++)
{
for(int y = 0; y < h; y++)
{
data[(x+y*w)*channels+i] = Vector4.Dot(readTex.GetPixel(x,y), factor);
}
}
}
encodeTex.Release ();
UnityEngine.Object.Destroy (encodeTex);
UnityEngine.Object.Destroy (readTex);
}
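The decode loop in Example 4 reverses a base-255 positional encoding: each float is spread across the RGBA channels by the EncodeFloat/EncodeToFloat shader and recovered with a single Vector4.Dot. A CPU-side sketch of that packing is shown below; the weights used here are the commonly seen 1, 255, 65025, 16581375 and must match whatever the companion shader actually uses (the listing above divides by 160581375 in the alpha slot, so its shader presumably encodes with that constant):
// CPU-side sketch of the RGBA packing that the decode loop above reverses.
// The weights are an assumption; they must match the encode shader.
static Color EncodeFloatToRGBA(float v) // v in [0, 1)
{
    Vector4 enc = new Vector4(1.0f, 255.0f, 65025.0f, 16581375.0f) * v;
    enc = new Vector4(enc.x - Mathf.Floor(enc.x), enc.y - Mathf.Floor(enc.y),
                      enc.z - Mathf.Floor(enc.z), enc.w - Mathf.Floor(enc.w));
    enc -= new Vector4(enc.y, enc.z, enc.w, enc.w) / 255.0f;
    return new Color(enc.x, enc.y, enc.z, enc.w);
}

static float DecodeRGBAToFloat(Color c)
{
    // Same dot product as data[(x + y * w) * channels + i] in Example 4.
    return Vector4.Dot(c, new Vector4(1.0f, 1.0f / 255.0f,
                                      1.0f / 65025.0f, 1.0f / 16581375.0f));
}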
Example 5: GenerateCaptureImage
/// <summary>
/// Gets the captured image as a Texture2D.
/// </summary>
/// <returns>
/// A Texture2D containing the captured image.
/// </returns>
private Texture2D GenerateCaptureImage()
{
Camera fixedCamera;
if (this.camera == null)
{
fixedCamera = GameObject.FindGameObjectWithTag("MainCamera").GetComponent<Camera>();
}
else
{
fixedCamera = this.camera;
}
int fixedWidth = this.imageWidth;
int fixedHeight = this.imageHeight;
int bit = 32;
int[] gameViewResolution = GetGameViewResolution();
if (fixedWidth == 0)
{
fixedWidth = gameViewResolution[0];
}
if (fixedHeight == 0)
{
fixedHeight = gameViewResolution[1];
}
fixedWidth *= this.imageScale;
fixedHeight *= this.imageScale;
Color presetBackgroundColor = fixedCamera.backgroundColor;
CameraClearFlags presetClearFlags = fixedCamera.clearFlags;
if (this.enableBackgroundAlpha)
{
fixedCamera.backgroundColor = Color.clear;
fixedCamera.clearFlags = CameraClearFlags.SolidColor;
}
RenderTexture presetRenderTexture = fixedCamera.targetTexture;
// Assign an output RenderTexture to the camera and render,
// then copy the result into a Texture2D and return it.
RenderTexture outputRenderTexture
= new RenderTexture(fixedWidth, fixedHeight, bit);
fixedCamera.targetTexture = outputRenderTexture;
Texture2D captureImage = new Texture2D(fixedWidth,
fixedHeight,
TextureFormat.ARGB32,
false);
fixedCamera.Render();
RenderTexture.active = outputRenderTexture;
captureImage.ReadPixels
(new Rect(0, 0, fixedWidth, fixedHeight), 0, 0);
// Restore the original settings.
fixedCamera.backgroundColor = presetBackgroundColor;
fixedCamera.clearFlags = presetClearFlags;
fixedCamera.targetTexture = presetRenderTexture;
// Release the render texture before returning.
RenderTexture.active = null;
outputRenderTexture.Release();
DestroyImmediate(outputRenderTexture);
return captureImage;
}
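Examples 2 and 5 follow the same capture pattern: assign a RenderTexture to Camera.targetTexture, call Render, read the pixels back through RenderTexture.active, restore the previous state, then Release and destroy the texture. A condensed sketch of that pattern (the helper name is ours, and error handling is omitted) is:
// Condensed sketch of the capture pattern shared by Examples 2 and 5.
static Texture2D CaptureCamera(Camera cam, int width, int height)
{
    RenderTexture rt = new RenderTexture(width, height, 24);
    RenderTexture previousTarget = cam.targetTexture;
    RenderTexture previousActive = RenderTexture.active;

    cam.targetTexture = rt;
    cam.Render();

    RenderTexture.active = rt;
    Texture2D image = new Texture2D(width, height, TextureFormat.ARGB32, false);
    image.ReadPixels(new Rect(0, 0, width, height), 0, 0);
    image.Apply();

    // Restore state, then free the GPU resources and the RenderTexture object.
    cam.targetTexture = previousTarget;
    RenderTexture.active = previousActive;
    rt.Release();
    UnityEngine.Object.Destroy(rt);
    return image;
}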
Example 6: workerMethod
//......... part of this method's code is omitted here .........
tempTexture.ReadPixels(new Rect(0, 0, destinationTexture.width, destinationTexture.height), 0, 0, umaGenerator.convertMipMaps);
RenderTexture.active = null;
}
else
{
// figures that ReadPixels works differently on OpenGL and DirectX, someday this code will break because Unity fixes this bug!
if (IsOpenGL())
{
for (int x = 0; x < xblocks; x++)
{
for (int y = 0; y < yblocks; y++)
{
RenderTexture.active = destinationTexture;
tempTexture.ReadPixels(new Rect(x * 512, y * 512, 512, 512), x * 512, y * 512, umaGenerator.convertMipMaps);
RenderTexture.active = null;
yield return 8;
}
}
}
else
{
for (int x = 0; x < xblocks; x++)
{
for (int y = 0; y < yblocks; y++)
{
RenderTexture.active = destinationTexture;
tempTexture.ReadPixels(new Rect(x * 512, destinationTexture.height - 512 - y * 512, 512, 512), x * 512, y * 512, umaGenerator.convertMipMaps);
RenderTexture.active = null;
yield return 8;
}
}
}
}
resultingTextures[textureType] = tempTexture as Texture;
renderCamera.targetTexture = null;
RenderTexture.active = null;
destinationTexture.Release();
UnityEngine.GameObject.DestroyImmediate(destinationTexture);
umaGenerator.textureMerge.gameObject.SetActive(false);
yield return 6;
tempTexture = resultingTextures[textureType] as Texture2D;
tempTexture.Apply();
tempTexture.wrapMode = TextureWrapMode.Repeat;
tempTexture.filterMode = FilterMode.Bilinear;
resultingTextures[textureType] = tempTexture;
atlas.material.SetTexture(slotData.asset.material.channels[textureType].materialPropertyName, tempTexture);
#endregion
}
else
{
destinationTexture.filterMode = FilterMode.Bilinear;
destinationTexture.wrapMode = TextureWrapMode.Repeat;
resultingTextures[textureType] = destinationTexture;
atlas.material.SetTexture(slotData.asset.material.channels[textureType].materialPropertyName, destinationTexture);
}
umaGenerator.textureMerge.gameObject.SetActive(false);
break;
}
case UMAMaterial.ChannelType.MaterialColor:
{
atlas.material.SetColor(slotData.asset.material.channels[textureType].materialPropertyName, atlas.materialFragments[0].baseColor);
break;
}
case UMAMaterial.ChannelType.TintedTexture:
{
for (int i = 0; i < atlas.materialFragments.Count; i++)
{
var fragment = atlas.materialFragments[i];
if (fragment.isRectShared) continue;
for (int j = 0; j < fragment.baseTexture.Length; j++)
{
if (fragment.baseTexture[j] != null)
{
atlas.material.SetTexture(slotData.asset.material.channels[j].materialPropertyName, fragment.baseTexture[j]);
if (j == 0)
{
atlas.material.color = fragment.baseColor;
}
}
}
foreach (var overlay in fragment.overlays)
{
for (int j = 0; j < overlay.textureList.Length; j++)
{
if (overlay.textureList[j] != null)
{
atlas.material.SetTexture(slotData.asset.material.channels[j].materialPropertyName, overlay.textureList[j]);
}
}
}
}
break;
}
}
}
atlas.resultingAtlasList = resultingTextures;
}
}
Example 7: SafeDestroyRenderTexture
void SafeDestroyRenderTexture( ref RenderTexture rt )
{
if ( rt != null )
{
RenderTexture.active = null;
rt.Release();
DestroyImmediate( rt );
rt = null;
}
}
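A typical call site for SafeDestroyRenderTexture is component teardown; the ref parameter lets the helper null out the cached field after releasing it. A hypothetical usage:
// Hypothetical usage: release a cached buffer when the owning component goes away.
RenderTexture _historyBuffer;

void OnDestroy()
{
    SafeDestroyRenderTexture(ref _historyBuffer); // releases, destroys, and nulls the field
}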
Example 8: ReleaseAndDestroy
public static void ReleaseAndDestroy(RenderTexture tex)
{
if (tex == null)
{
return;
}
tex.Release();
UnityEngine.Object.Destroy(tex);
}
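Object.Destroy is only valid while the game is playing; editor-time code normally calls DestroyImmediate instead. A sketch of an editor-safe variant of ReleaseAndDestroy (the helper name and the isPlaying switch are assumptions) could look like:
// Sketch of an editor-safe variant of ReleaseAndDestroy.
public static void ReleaseAndDestroySafe(RenderTexture tex)
{
    if (tex == null) return;
    tex.Release();
    if (Application.isPlaying)
        UnityEngine.Object.Destroy(tex);
    else
        UnityEngine.Object.DestroyImmediate(tex);
}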
Example 9: Update
void Update()
{
for (int i = 0; i < ripples.Count; i++) {
Ripple tmp = ripples[i];
tmp.positionScale.z *= tmp.scaleSpeed;
tmp.positionScale.w *= tmp.scaleSpeed;
tmp.rippleStrength *= strengthDecay;
if(ripples[i].rippleStrength <= 0.1f)
{
ripples.Remove(ripples[i]);
}else{
ripples[i] = tmp;
}
}
RenderTexture tmpTexture = new RenderTexture (rippleOutput.width, rippleOutput.height, rippleOutput.depth); // note: tmpTexture is allocated and released below but never rendered into in this example
Graphics.SetRenderTarget(rippleOutput);
if (ripples.Count == 0) {
Graphics.Blit(rippleOutput,rippleOutput,rippleRenderer.sharedMaterial,0);
}
//Graphics.Blit(rippleOutput,rippleOutput,renderer.sharedMaterial,1);
//TODO: Push this over to a compute shader to remove the blit calls
rippleRenderer.sharedMaterial.SetFloat("_Width", 128);
rippleRenderer.sharedMaterial.SetFloat("_Height", 128);
for (int i = 0; i < ripples.Count; ++i) {
rippleRenderer.sharedMaterial.SetVector("_BaseRipplePosition", ripples[i].positionScale);
rippleRenderer.sharedMaterial.SetFloat("_BaseRippleStrength", ripples[i].rippleStrength);
Graphics.Blit(rippleOutput,rippleOutput,rippleRenderer.sharedMaterial,1);
}
tmpTexture.Release ();
}
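Example 9 blits rippleOutput onto itself, which can be undefined behaviour on some platforms, and the temporary texture it allocates each Update is never used. A ping-pong sketch that routes each ripple pass through a pooled temporary instead is shown below; it assumes the same rippleOutput, rippleRenderer fields and material pass index as the example above:
// Ping-pong sketch, assuming the fields and pass index from Example 9.
void ApplyRipplePass(Vector4 positionScale, float strength)
{
    RenderTexture tmp = RenderTexture.GetTemporary(rippleOutput.width,
                                                   rippleOutput.height,
                                                   rippleOutput.depth);
    rippleRenderer.sharedMaterial.SetVector("_BaseRipplePosition", positionScale);
    rippleRenderer.sharedMaterial.SetFloat("_BaseRippleStrength", strength);

    Graphics.Blit(rippleOutput, tmp, rippleRenderer.sharedMaterial, 1); // write into tmp
    Graphics.Blit(tmp, rippleOutput);                                   // copy back

    RenderTexture.ReleaseTemporary(tmp);
}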
Example 10: GenerateToTexture
/// <summary>
/// Generates a texture containing the given graph's noise output.
/// If this is being called very often, create a permanent render target and material and
/// use the other version of this method instead for much better performance.
/// If an error occurs, this logs to the Unity debug console and returns "null".
/// </summary>
/// <param name="outputComponents">
/// The texture output.
/// For example, pass "rgb" or "xyz" to output the noise into the red, green, and blue channels
/// but not the alpha channel.
/// </param>
/// <param name="defaultColor">
/// The color (generally 0-1) of the color components which aren't set by the noise.
/// </param>
public static Texture2D GenerateToTexture(Graph g, GraphParamCollection c, int width, int height,
string outputComponents, float defaultColor,
TextureFormat format = TextureFormat.RGBAFloat)
{
//Generate a shader from the graph and have Unity compile it.
string shaderPath = Path.Combine(Application.dataPath, "gpuNoiseShaderTemp.shader");
Shader shader = SaveShader(g, shaderPath, "TempGPUNoiseShader", outputComponents, defaultColor);
if (shader == null)
{
return null;
}
//Render the shader's output into a render texture and copy the data to a Texture2D.
RenderTexture target = new RenderTexture(width, height, 16, RenderTextureFormat.ARGBFloat);
target.Create();
Texture2D resultTex = new Texture2D(width, height, format, false, true);
//Create the material and set its parameters.
Material mat = new Material(shader);
c.SetParams(mat);
GraphUtils.GenerateToTexture(target, mat, resultTex);
//Clean up.
target.Release();
if (!AssetDatabase.DeleteAsset(StringUtils.GetRelativePath(shaderPath, "Assets")))
{
Debug.LogError("Unable to delete temp file: " + shaderPath);
}
return resultTex;
}
Example 11: CreateDistanceField
/*****/
private void CreateDistanceField()
{
var size = 128;
var pdbName = "MA_matrix_G1";
string path = "Assets/Resources/3D Textures/" + pdbName + ".asset";
Texture3D tmp = (Texture3D)AssetDatabase.LoadAssetAtPath(path, typeof(Texture3D));
if (tmp)
{
_volumeTexture = tmp;
}
else
{
RenderTexture _distanceFieldRT;
_distanceFieldRT = new RenderTexture(size, size, 0, RenderTextureFormat.R8);
_distanceFieldRT.volumeDepth = size;
_distanceFieldRT.isVolume = true;
_distanceFieldRT.isPowerOfTwo = true;
_distanceFieldRT.enableRandomWrite = true;
_distanceFieldRT.filterMode = FilterMode.Trilinear;
_distanceFieldRT.name = pdbName;
_distanceFieldRT.hideFlags = HideFlags.HideAndDontSave;
_distanceFieldRT.generateMips = true;
_distanceFieldRT.useMipMap = true;
_distanceFieldRT.Create();
var atomSpheres = PdbLoader.LoadAtomSpheres(pdbName);
var atomSphereGPUBuffer = new ComputeBuffer(atomSpheres.Count, sizeof(float) * 4, ComputeBufferType.Default);
atomSphereGPUBuffer.SetData(atomSpheres.ToArray());
Graphics.SetRenderTarget(_distanceFieldRT);
GL.Clear(true, true, new Color(0, 0, 0));
var createDistanceFieldCS = Resources.Load("Compute Shaders/CreateDistanceField") as ComputeShader;
createDistanceFieldCS.SetInt("_GridSize", size);
createDistanceFieldCS.SetInt("_NumAtoms", atomSpheres.Count);
createDistanceFieldCS.SetBuffer(0, "_SpherePositions", atomSphereGPUBuffer);
createDistanceFieldCS.SetTexture(0, "_VolumeTexture", _distanceFieldRT);
createDistanceFieldCS.Dispatch(0, Mathf.CeilToInt(size / 10.0f), Mathf.CeilToInt(size / 10.0f), Mathf.CeilToInt(size / 10.0f));
atomSphereGPUBuffer.Release();
//****
var flatSize = size * size * size;
var voxelGPUBuffer = new ComputeBuffer(flatSize, sizeof(float));
var readVoxelsCS = Resources.Load("Compute Shaders/ReadVoxels") as ComputeShader;
readVoxelsCS.SetInt("_VolumeSize", size);
readVoxelsCS.SetBuffer(0, "_VoxelBuffer", voxelGPUBuffer);
readVoxelsCS.SetTexture(0, "_VolumeTexture", _distanceFieldRT);
readVoxelsCS.Dispatch(0, size, size, size);
var voxelCPUBuffer = new float[flatSize];
voxelGPUBuffer.GetData(voxelCPUBuffer);
var volumeColors = new Color[flatSize];
for (int i = 0; i < flatSize; i++)
{
volumeColors[i] = new Color(0, 0, 0, voxelCPUBuffer[i]);
}
var texture3D = new Texture3D(size, size, size, TextureFormat.Alpha8, true);
texture3D.SetPixels(volumeColors);
texture3D.wrapMode = TextureWrapMode.Clamp;
texture3D.anisoLevel = 0;
texture3D.Apply();
AssetDatabase.CreateAsset(texture3D, path);
AssetDatabase.SaveAssets();
// Print the path of the created asset
Debug.Log(AssetDatabase.GetAssetPath(texture3D));
voxelGPUBuffer.Release();
_distanceFieldRT.Release();
DestroyImmediate(_distanceFieldRT);
_volumeTexture = texture3D;
}
}
Example 12: capture
public bool capture(Texture targetCube, Vector3 position, Quaternion rotation, bool HDR, bool linear, bool convolve) {
if(targetCube == null) return false;
bool tempRT = false;
if(cubeRT == null) {
tempRT = true;
//everything's captured to an HDR buffer right now
cubeRT = RenderTexture.GetTemporary(targetCube.width, targetCube.width, 24, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);
cubeRT.Release();
cubeRT.isCubemap = true;
cubeRT.useMipMap = true;
cubeRT.generateMips = true;
cubeRT.Create();
if(!cubeRT.IsCreated() && !cubeRT.Create()) {
cubeRT = RenderTexture.GetTemporary(targetCube.width, targetCube.width, 24, RenderTextureFormat.Default, RenderTextureReadWrite.Linear);
cubeRT.Release();
cubeRT.isCubemap = true;
cubeRT.useMipMap = true;
cubeRT.generateMips = true;
cubeRT.Create();
}
}
if(!cubeRT.IsCreated() && !cubeRT.Create()) return false;
GameObject go = null;
Camera cam = null;
go = new GameObject("_temp_probe");
cam = go.AddComponent<Camera>();
mset.SkyManager mgr = mset.SkyManager.Get();
if(mgr && mgr.ProbeCamera) {
cam.CopyFrom (mgr.ProbeCamera);
} else if(Camera.main) {
cam.CopyFrom (Camera.main);
}
cam.renderingPath = renderPath;
cam.useOcclusionCulling = false;
cam.hdr = true;
go.hideFlags = HideFlags.HideInHierarchy | HideFlags.HideAndDontSave;
go.SetActive(true);
go.transform.position = position;
//capture entire scene in HDR
Shader.SetGlobalVector("_UniformOcclusion", this.exposures);
cam.RenderToCubemap(cubeRT);
Shader.SetGlobalVector("_UniformOcclusion", Vector4.one);
Matrix4x4 matrix = Matrix4x4.identity;
matrix.SetTRS(position, rotation, Vector3.one);
//render cubeRT converting it to RGBM
Material skyMat = new Material(Shader.Find("Hidden/Marmoset/RGBM Cube"));
bool dstRGBM = HDR;
bool srcRGBM = false;
copy_internal(targetCube, cubeRT, dstRGBM, srcRGBM, linear, cam, skyMat, matrix);
if( convolve ) {
convolve_internal(targetCube, cubeRT, HDR, false, linear, cam, skyMat, matrix);
}
//make sure the old sky and matrix vars are bound again
if(mgr) mgr.GlobalSky = mgr.GlobalSky;
Material.DestroyImmediate(skyMat);
GameObject.DestroyImmediate(go);
#if UNITY_5
if(tempRT) GameObject.DestroyImmediate(cubeRT);
#else
if(tempRT) RenderTexture.ReleaseTemporary(cubeRT);
#endif
return true;
}
Example 13: InitRenderTexture
void InitRenderTexture(ref RenderTexture rt, int width, int height, int depth, RenderTextureFormat format, bool temp = true)
{
if (temp)
{
rt = RenderTexture.GetTemporary(width, height, depth, format);
}
else
{
if (rt != null)
{
if (rt.width == width && rt.height == height && rt.depth == depth && rt.format == format)
return;
rt.Release();
DestroyImmediate(rt);
}
rt = new RenderTexture(width, height, depth, format);
rt.hideFlags = HideFlags.HideAndDontSave;
}
}
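When using InitRenderTexture, the release call has to match the allocation path selected by the temp flag: pooled textures go back through RenderTexture.ReleaseTemporary, while persistent ones are freed with Release plus DestroyImmediate, exactly as the helper itself does when it resizes. A hypothetical usage:
// Hypothetical usage of InitRenderTexture; field and method names are assumptions.
RenderTexture _scratch;

void RenderScratchPass()
{
    // Pooled path (temp == true): pair with RenderTexture.ReleaseTemporary.
    InitRenderTexture(ref _scratch, 1024, 1024, 0, RenderTextureFormat.ARGBHalf, true);
    // ... render into _scratch ...
    RenderTexture.ReleaseTemporary(_scratch);
    _scratch = null;
}

void OnDestroy()
{
    // Persistent path (temp == false): pair with Release + DestroyImmediate.
    if (_scratch != null)
    {
        _scratch.Release();
        DestroyImmediate(_scratch);
        _scratch = null;
    }
}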
Example 14: Release
public static void Release(RenderTexture tex)
{
if (tex == null)
{
return;
}
tex.Release();
}
Example 15: workerMethod
//......... part of this method's code is omitted here .........
renderCamera = umaGenerator.textureMerge.myCamera;
Vector3 tempPosition = renderCamera.transform.position;
renderCamera.orthographicSize = umaData.atlasList.atlas[atlasIndex].cropResolution.y / umaGenerator.atlasResolution;
renderCamera.transform.position = tempPosition + (-Vector3.right * (1 - umaData.atlasList.atlas[atlasIndex].cropResolution.x / umaGenerator.atlasResolution)) + (-Vector3.up * (1 - renderCamera.orthographicSize));
renderCamera.targetTexture = destinationTexture;
renderCamera.Render();
renderCamera.transform.position = tempPosition;
renderCamera.active = false;
renderCamera.targetTexture = null;
yield return 25;
if (umaGenerator.convertRenderTexture)
{
Texture2D tempTexture;
tempTexture = new Texture2D(destinationTexture.width, destinationTexture.height, TextureFormat.ARGB32, umaGenerator.convertMipMaps);
int xblocks = destinationTexture.width / 512;
int yblocks = destinationTexture.height / 512;
if (xblocks == 0 || yblocks == 0)
{
RenderTexture.active = destinationTexture;
tempTexture.ReadPixels(new Rect(0, 0, destinationTexture.width, destinationTexture.height), 0, 0, umaGenerator.convertMipMaps);
RenderTexture.active = null;
}
else
{
// figures that ReadPixels works differently on OpenGL and DirectX, someday this code will break because Unity fixes this bug!
if (IsOpenGL())
{
for (int x = 0; x < xblocks; x++)
{
for (int y = 0; y < yblocks; y++)
{
RenderTexture.active = destinationTexture;
tempTexture.ReadPixels(new Rect(x * 512, y * 512, 512, 512), x * 512, y * 512, umaGenerator.convertMipMaps);
RenderTexture.active = null;
yield return 8;
}
}
}
else
{
for (int x = 0; x < xblocks; x++)
{
for (int y = 0; y < yblocks; y++)
{
RenderTexture.active = destinationTexture;
tempTexture.ReadPixels(new Rect(x * 512, destinationTexture.height - 512 - y * 512, 512, 512), x * 512, y * 512, umaGenerator.convertMipMaps);
RenderTexture.active = null;
yield return 8;
}
}
}
}
resultingTextures[textureType] = tempTexture as Texture;
renderCamera.targetTexture = null;
RenderTexture.active = null;
destinationTexture.Release();
UnityEngine.GameObject.DestroyImmediate(destinationTexture);
umaGenerator.textureMerge.gameObject.SetActive(false);
yield return 6;
tempTexture = resultingTextures[textureType] as Texture2D;
tempTexture.Apply();
resultingTextures[textureType] = tempTexture;
}
else
{
destinationTexture.filterMode = FilterMode.Bilinear;
resultingTextures[textureType] = destinationTexture;
}
umaGenerator.textureMerge.gameObject.SetActive(false);
}
else
{
}
}
for (int textureModuleIndex = 0; textureModuleIndex < textureModuleList.Length; textureModuleIndex++)
{
textureModuleList[textureModuleIndex].gameObject.SetActive(false);
// UnityEngine.Object.DestroyImmediate(textureModuleList[textureModuleIndex].gameObject.renderer.material);
// UnityEngine.Object.DestroyImmediate(textureModuleList[textureModuleIndex].gameObject);
}
umaData.atlasList.atlas[atlasIndex].resultingAtlasList = resultingTextures;
umaData.atlasList.atlas[atlasIndex].materialSample = UnityEngine.Object.Instantiate(umaData.atlasList.atlas[atlasIndex].atlasMaterialDefinitions[0].source.materialSample) as Material;
umaData.atlasList.atlas[atlasIndex].materialSample.name = umaData.atlasList.atlas[atlasIndex].atlasMaterialDefinitions[0].source.materialSample.name;
for (int finalTextureType = 0; finalTextureType < umaGenerator.textureNameList.Length; finalTextureType++)
{
if (umaData.atlasList.atlas[atlasIndex].materialSample.HasProperty(umaGenerator.textureNameList[finalTextureType]))
{
umaData.atlasList.atlas[atlasIndex].materialSample.SetTexture(umaGenerator.textureNameList[finalTextureType], resultingTextures[finalTextureType]);
}
}
}
}