本文整理汇总了C#中WebGLRenderingContext.flush方法的典型用法代码示例。如果您正苦于以下问题:C# WebGLRenderingContext.flush方法的具体用法?C# WebGLRenderingContext.flush怎么用?C# WebGLRenderingContext.flush使用的例子?那么恭喜您，这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类WebGLRenderingContext
的用法示例。
在下文中一共展示了WebGLRenderingContext.flush方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: Application
//.........这里部分代码省略.........
//canvasPY.rotate((float)(-Math.PI / 2));
canvasPY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
//canvasPY.restore();
renderer0.render(scene, cameraNY);
//canvasNY.save();
//canvasNY.translate(size, 0);
//canvasNY.rotate((float)(Math.PI / 2));
canvasNY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
//canvasNY.restore();
// ?
#endregion
//renderer0.render(scene, cameraPX);
//rendererPY.render(scene, cameraPY);
// at this point we should be able to render the sphere texture
//public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
//public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
//public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
//public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
//public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
//public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;
//var cube0 = new IHTMLImage[] {
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
//};
new[] {
canvasPX, canvasNX,
canvasPY, canvasNY,
canvasPZ, canvasNZ
}.WithEachIndex(
(img, index) =>
{
gl.bindTexture(gl.TEXTURE_CUBE_MAP, pass.tex);
//gl.pixelStorei(gl.UNPACK_FLIP_X_WEBGL, false);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
// http://stackoverflow.com/questions/15364517/pixelstoreigl-unpack-flip-y-webgl-true
// https://msdn.microsoft.com/en-us/library/dn302429(v=vs.85).aspx
//gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
//gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);
gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img.canvas);
}
);
pass.Paint_Image(
0,
0,
0,
0,
0
//,
// gl_FragCoord
// cannot be scaled, and can be referenced directly.
// need another way to scale
//zoom: 0.3f
);
//paintsw.Stop();
// what does it do?
gl.flush();
};
}
);
#endregion
Console.WriteLine("do you see it?");
}
示例2: Application
//.........这里部分代码省略.........
new IHTMLPre { "at callback" }.AttachToDocument();
},
obj: null,
forceMuted: false,
forcePaused: false
);
#region CaptureMouse
var mMouseOriX = 0;
var mMouseOriY = 0;
var mMousePosX = 0;
var mMousePosY = 0;
c.onmousedown += async down =>
{
    // Latch the drag origin; the "position" pair tracks this origin
    // until onmousemove starts accumulating movement deltas.
    mMouseOriX = down.CursorX;
    mMouseOriY = down.CursorY;
    mMousePosX = mMouseOriX;
    mMousePosY = mMouseOriY;

    // Capture relative mouse movement for the duration of the press;
    // pointer lock is released once the button comes back up.
    down.Element.requestPointerLock();
    await down.Element.async.onmouseup;
    Native.document.exitPointerLock();

    // Shadertoy iMouse convention: a non-positive origin means
    // "button released".
    mMouseOriX = -Math.Abs(mMouseOriX);
    mMouseOriY = -Math.Abs(mMouseOriY);
};
c.onmousemove += move =>
{
    // Only accumulate while the left button is held (a drag);
    // under pointer lock, movementX/movementY are per-event deltas.
    if (move.MouseButton != IEvent.MouseButtonEnum.Left)
        return;

    mMousePosX += move.movementX;
    mMousePosY += move.movementY;
};
#endregion
mEffect.mPasses[0].mInputs[0] = new ChromeShaderToyColumns.Library.ShaderToy.samplerCube { };
mEffect.mPasses[0].MakeHeader_Image();
mEffect.mPasses[0].NewShader_Image(vs);
#region onresize
// Keep the canvas sized to the full browser window: size it once up
// front, then re-apply on every window resize notification.
new { }.With(
async delegate
{
do
{
c.width = Native.window.Width;
//c.height = Native.window.Height / 2;
c.height = Native.window.Height;
// Match the CSS size to the backing-store size so the canvas
// is not stretched by the browser.
c.style.SetSize(c.width, c.height);
}
// One iteration per resize event.
while (await Native.window.async.onresize);
}
);
#endregion
Console.WriteLine("can you see any?");
var sw = Stopwatch.StartNew();
do
{
mEffect.mPasses[0].Paint_Image(
sw.ElapsedMilliseconds / 1000.0f,
mMouseOriX,
mMouseOriY,
mMousePosX,
mMousePosY
);
// what does it do?
gl.flush();
}
while (await Native.window.async.onframe);
}
);
}
示例3: Application
//.........这里部分代码省略.........
//renderer0.render(scene, cameraPX);
//rendererPY.render(scene, cameraPY);
// at this point we should be able to render the sphere texture
//public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
//public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
//public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
//public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
//public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
//public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;
//var cube0 = new IHTMLImage[] {
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
//};
new[] {
canvasPX, canvasNX,
canvasPY, canvasNY,
canvasPZ, canvasNZ
}.WithEachIndex(
(img, index) =>
{
gl.bindTexture(gl.TEXTURE_CUBE_MAP, pass.tex);
//gl.pixelStorei(gl.UNPACK_FLIP_X_WEBGL, false);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
// http://stackoverflow.com/questions/15364517/pixelstoreigl-unpack-flip-y-webgl-true
// https://msdn.microsoft.com/en-us/library/dn302429(v=vs.85).aspx
//gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
//gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);
gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img.canvas);
}
);
//if (cameraz.valueAsNumber == 0)
gl.clearColor(0, 0, 0, 0);
//else
//gl4K.clearColor(0, 0, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
// could do dynamic resolution- fog of war or fog of FOV. where up to 150deg field of vision is encouragedm, not 360
pass.Paint_Image(
0,
0,
0,
0,
0
//,
// gl_FragCoord
// cannot be scaled, and can be referenced directly.
// need another way to scale
//zoom: 0.3f
);
//paintsw.Stop();
// what does it do?
gl.flush();
// let render man know..
if (vsync != null)
if (!vsync.Task.IsCompleted)
vsync.SetResult(null);
};
Console.WriteLine("do you see it?");
}
);
}
示例4: Application
//.........这里部分代码省略.........
// Effect.prototype.Paint = function(time, mouseOriX, mouseOriY, mousePosX, mousePosY, isPaused)
// EffectPass.prototype.Paint = function( wa, gl, time, mouseOriX, mouseOriY, mousePosX, mousePosY, xres, yres, isPaused )
// EffectPass.prototype.Paint_Image = function( wa, gl, time, mouseOriX, mouseOriY, mousePosX, mousePosY, xres, yres )
var pass = new Library.ShaderToy.EffectPass(
mAudioContext,
gl,
precission: Library.ShaderToy.DetermineShaderPrecission(gl),
supportDerivatives: gl.getExtension("OES_standard_derivatives") != null,
callback: null,
obj: null,
forceMuted: false,
forcePaused: false,
//quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
outputGainNode: null
);
// how shall we upload our textures?
// can we reference GLSL.samplerCube yet?
//pass.mInputs[0] = new samplerCube { };
//pass.mInputs[0] = new Library.ShaderToy.samplerCube { };
var xsampler2D = new Library.ShaderToy.sampler2D { };
pass.mInputs[0] = xsampler2D;
pass.MakeHeader_Image();
pass.NewShader_Image(vs);
//var all = new Textures2 { }.Images;
var all = new[] {
new EquirectangularToAzimuthal.HTML.Images.FromAssets._20151001T0000 { }
};
new { }.With(
async delegate
{
var i = 0;
while (true)
{
xsampler2D.upload(
all[i % all.Length]
//new HTML.Images.FromAssets._20151016T0000 { }
);
i++;
await Task.Delay(1000);
}
}
);
var sw = Stopwatch.StartNew();
var paintsw = Stopwatch.StartNew();
new IHTMLPre { () => new { paintsw.ElapsedMilliseconds } }.AttachToDocument();
do
{
// NOTE(review): "[email protected]" is almost certainly scraper damage —
// an e-mail-obfuscation filter mangled the original member access on
// this line. Recover the real awaited expression from the original
// source before compiling; as written this line cannot compile.
await [email protected];
// Time only the Paint_Image call for the on-screen ElapsedMilliseconds readout.
paintsw.Restart();
pass.Paint_Image(
sw.ElapsedMilliseconds / 1000.0f,
mMouseOriX,
mMouseOriY,
mMousePosX,
mMousePosY
//,
// gl_FragCoord
// cannot be scaled, and can be referenced directly.
// need another way to scale
//zoom: 0.3f
);
paintsw.Stop();
// what does it do?
// need nonpartial code.
gl.flush();
}
// One iteration per animation frame.
while (await Native.window.async.onframe);
}
);
}
示例5: Application
//.........这里部分代码省略.........
// http://www.html5rocks.com/en/tutorials/webgl/webgl_fundamentals/
//gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
gl.viewport(0, 0, viewportxres, viewportyres);
// alpha to zero will only hide the pixel if blending is enabled.
gl.useProgram(mProgram);
// uniform4fv
var mouse = new[] { mousePosX, mousePosY, mouseOriX, mouseOriY };
var l2 = gl.getUniformLocation(mProgram, "iGlobalTime"); if (l2 != null) gl.uniform1f(l2, time);
var l3 = gl.getUniformLocation(mProgram, "iResolution"); if (l3 != null) gl.uniform3f(l3, viewportxres, viewportyres, 1.0f);
var l4 = gl.getUniformLocation(mProgram, "iMouse"); if (l4 != null) gl.uniform4fv(l4, mouse);
//var l7 = gl.getUniformLocation(this.mProgram, "iDate"); if (l7 != null) gl.uniform4fv(l7, dates);
//var l9 = gl.getUniformLocation(this.mProgram, "iSampleRate"); if (l9 != null) gl.uniform1f(l9, this.mSampleRate);
var ich0 = gl.getUniformLocation(mProgram, "iChannel0"); if (ich0 != null) gl.uniform1i(ich0, 0);
var ich1 = gl.getUniformLocation(mProgram, "iChannel1"); if (ich1 != null) gl.uniform1i(ich1, 1);
var ich2 = gl.getUniformLocation(mProgram, "iChannel2"); if (ich2 != null) gl.uniform1i(ich2, 2);
var ich3 = gl.getUniformLocation(mProgram, "iChannel3"); if (ich3 != null) gl.uniform1i(ich3, 3);
//for (var i = 0; i < mInputs.Length; i++)
//{
// var inp = mInputs[i];
// gl.activeTexture((uint)(gl.TEXTURE0 + i));
// if (inp == null)
// {
// gl.bindTexture(gl.TEXTURE_2D, null);
// }
//}
var times = new[] { 0.0f, 0.0f, 0.0f, 0.0f };
var l5 = gl.getUniformLocation(mProgram, "iChannelTime");
if (l5 != null) gl.uniform1fv(l5, times);
var resos = new float[12] { 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f };
var l8 = gl.getUniformLocation(mProgram, "iChannelResolution");
if (l8 != null) gl.uniform3fv(l8, resos);
// using ?
var l1 = (uint)gl.getAttribLocation(mProgram, "pos");
gl.bindBuffer(gl.ARRAY_BUFFER, quadVBO);
gl.vertexAttribPointer(l1, 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(l1);
gl.drawArrays(gl.TRIANGLES, 0, 6);
// first frame is now visible
gl.disableVertexAttribArray(l1);
#endregion
//mFrame++;
};
#endregion
var sw = Stopwatch.StartNew();
do
{
pass1.Paint_Image(
sw.ElapsedMilliseconds / 1000.0f,
mMouseOriX,
mMouseOriY,
mMousePosX,
mMousePosY,
zoom: 1.0f
);
pass0.Paint_Image(
sw.ElapsedMilliseconds / 1000.0f,
mMouseOriX,
mMouseOriY,
mMousePosX,
mMousePosY,
//zoom: 0.5f
zoom: mMousePosX / (float)c.width
);
// what does it do?
gl.flush();
}
while (await Native.window.async.onframe);
}
);
}
示例6: Application
//.........这里部分代码省略.........
//drawBall_gray(0, -2, 0, 1.5f);
//drawBall_white(0, -3.5f, 0, 1f);
//drawBall_gray(-2, -1, 0, 1.5f);
//drawBall_gray(2, 1, 0, 1.5f);
//drawBall_white(3 + 0.5f, 1.5f + 0.5f, 0, 1f);
//drawBall_gray(0, 2, 0, 1.5f);
//drawBall_white(0, 3.5f, 0, 1f);
//drawBall_gray(-2, 1, 0, 1.5f);
//#endregion
//#region CH2-CH2
//drawBall_white(6, -1 + 1, -1.5f, 1f);
//drawBall_gray(6, -1, 0, 1.5f);
//drawBall_white(6, -1 + 1, 1.5f, 1f);
//drawBall_white(4, -2 - 1, -1.5f, 1f);
//drawBall_gray(4, -2, 0, 1.5f);
//drawBall_white(4, -2 - 1, 1.5f, 1f);
//#endregion
//#region NH2
//drawBall_white(8, -2 - 1, -1.5f, 1f);
//drawBall_blue(8, -2, 0, 1.5f);
//drawBall_white(8, -2 - 1, 1.5f, 1f);
//#endregion
gl.flush();
};
#endregion
#region mouse
canvas.onmousedown += ev =>
{
ev.preventDefault();
drag = 1;
xOffs = ev.CursorX;
yOffs = ev.CursorY;
};
canvas.onmouseup += ev =>
{
ev.preventDefault();
drag = 0;
xOffs = ev.CursorX;
yOffs = ev.CursorY;
};
canvas.onmousemove += ev =>
{
if (drag == 0)
return;
ev.preventDefault();
if (ev.shiftKey)
{
示例7: AttachToDocument
//.........这里部分代码省略.........
//}
//.With(a => { a.onclick += e => { e.preventDefault(); Native.window.open("about:gpu"); }; } )
}.AttachToDocument();
};
#endregion
#region onresize
new { }.With(
async delegate
{
do
{
c.width = Native.window.Width;
c.height = Native.window.Height;
c.style.SetSize(c.width, c.height);
}
while (await Native.window.async.onresize);
}
);
#endregion
#region CaptureMouse
var mMouseOriX = 0;
var mMouseOriY = 0;
var mMousePosX = 0;
var mMousePosY = 0;
c.onmousedown += down =>
{
    // Convert the DOM y-coordinate (origin top-left, y grows down)
    // into GL convention (origin bottom-left) before latching the
    // drag origin.
    var originX = down.CursorX;
    var originY = c.height - down.CursorY;

    mMouseOriX = originX;
    mMouseOriY = originY;
    mMousePosX = originX;
    mMousePosY = originY;

    down.CaptureMouse();
};
c.onmousemove += ev =>
{
// Track the cursor only while the left button is held (a drag).
if (ev.MouseButton == IEvent.MouseButtonEnum.Left)
{
mMousePosX = ev.CursorX;
// X:\jsc.svn\examples\javascript\chrome\apps\WebGL\synergy\InputMouseByIq\InputMouseByIq\Shaders\Program.frag
//mMousePosY = ev.CursorY;
// Flip the DOM y-axis into GL convention (origin bottom-left).
mMousePosY = c.height - ev.CursorY;
}
};
c.onmouseup += up =>
{
    // Shadertoy iMouse contract: the origin pair is made non-positive
    // while no mouse button is held.
    mMouseOriY = -Math.Abs(mMouseOriY);
    mMouseOriX = -Math.Abs(mMouseOriX);
};
#endregion
var mEffect = new ChromeShaderToyColumns.Library.ShaderToy.Effect(
mAudioContext,
gl,
callback: delegate
{
new IHTMLPre { "at callback" }.AttachToDocument();
},
obj: null,
forceMuted: false,
forcePaused: false
);
mEffect.mPasses[0].MakeHeader_Image();
mEffect.mPasses[0].NewShader_Image(vs);
var sw = Stopwatch.StartNew();
do
{
mEffect.mPasses[0].Paint_Image(
sw.ElapsedMilliseconds / 1000.0f,
mMouseOriX,
mMouseOriY,
mMousePosX,
mMousePosY
);
// what does it do?
gl.flush();
}
while (await Native.window.async.onframe);
}
示例8: Application
//.........这里部分代码省略.........
async delegate
{
Native.body.style.margin = "0px";
var vs = new Shaders.ProgramFragmentShader();
var mAudioContext = new AudioContext();
var gl = new WebGLRenderingContext(alpha: true);
var c = gl.canvas.AttachToDocument();
#region onresize
new { }.With(
async delegate
{
do
{
c.width = Native.window.Width;
c.height = Native.window.Height;
c.style.SetSize(c.width, c.height);
}
while (await Native.window.async.onresize);
}
);
#endregion
#region CaptureMouse
// Shadertoy-style iMouse state: the origin is latched on mousedown, the
// position follows the cursor while the left button is held, and the
// origin is made non-positive on mouseup to signal "released".
var mMouseOriX = 0;
var mMouseOriY = 0;
var mMousePosX = 0;
var mMousePosY = 0;
c.onmousedown += ev =>
{
    mMouseOriX = ev.CursorX;
    // Flip the DOM y-axis (origin top-left) into GL convention (origin
    // bottom-left), matching the onmousemove handler below. Previously
    // the origin kept the un-flipped CursorY while the position was
    // flipped, so origin and position used different coordinate systems
    // (the sibling example in this file flips both).
    mMouseOriY = c.height - ev.CursorY;
    mMousePosX = mMouseOriX;
    mMousePosY = mMouseOriY;
    ev.CaptureMouse();
};
c.onmousemove += ev =>
{
    // Track the cursor only while the left button is held (a drag).
    if (ev.MouseButton == IEvent.MouseButtonEnum.Left)
    {
        mMousePosX = ev.CursorX;
        mMousePosY = c.height - ev.CursorY;
    }
};
c.onmouseup += ev =>
{
    // Non-positive origin == button no longer held (Shadertoy contract).
    mMouseOriX = -Math.Abs(mMouseOriX);
    mMouseOriY = -Math.Abs(mMouseOriY);
};
#endregion
var mEffect = new ChromeShaderToyColumns.Library.ShaderToy.Effect(
mAudioContext,
gl,
callback: delegate
{
new IHTMLPre { "at callback" }.AttachToDocument();
},
obj: null,
forceMuted: false,
forcePaused: false
);
mEffect.mPasses[0].MakeHeader_Image();
mEffect.mPasses[0].NewShader_Image(vs);
var sw = Stopwatch.StartNew();
do
{
mEffect.mPasses[0].Paint_Image(
sw.ElapsedMilliseconds / 1000.0f,
mMouseOriX,
mMouseOriY,
mMousePosX,
mMousePosY
);
// what does it do?
gl.flush();
}
while (await Native.window.async.onframe);
}
);
}
示例9: Application
//.........这里部分代码省略.........
var vertices = new Float32Array(fvertices);
#endregion
gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
gl.vertexAttribPointer(vec2pos, 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(vec2pos);
// GL ERROR :GL_INVALID_OPERATION : glDrawArrays: attempt to render with no buffer attached to enabled attribute 1
gl.drawArrays(gl.TRIANGLES, 0, 6);
// first frame is now visible
gl.disableVertexAttribArray(vec2pos);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
#endregion
//mFrame++;
};
#endregion
Paint_Image(
sw.ElapsedMilliseconds / 1000.0f,
0,
0,
0,
0
);
gl.flush();
//// INVALID_OPERATION: generateMipmap: level 0 not power of 2 or not all the same size
gl.bindTexture(gl.TEXTURE_2D, xWebGLTexture);
gl.generateMipmap(gl.TEXTURE_2D);
gl.bindTexture(gl.TEXTURE_2D, null);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
#endregion
gl.clearColor(0, 0, 1, 1.0f);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
glMatrix.mat4.perspective(45f, (float)gl_viewportWidth / (float)gl_viewportHeight, 0.1f, 120.0f, pMatrix);
glMatrix.mat4.identity(mvMatrix);
glMatrix.mat4.translate(mvMatrix, new float[] {
-1.5f + (f)Math.Cos(
sw.ElapsedMilliseconds
//slow it down
*0.001f
)
, 0.0f, -15.0f });
//glMatrix.mat4.translate(mvMatrix, new float[] { 3.0f, 0.0f, 0.0f });
gl.useProgram(shaderProgram);
// X:\jsc.svn\examples\javascript\WebGL\WebGLLesson05\WebGLLesson05\Application.cs
示例10: Application
//.........这里部分代码省略.........
//public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
//public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
//public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
//public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
//public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
//public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;
//var cube0 = new IHTMLImage[] {
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
//};
#region Paint_Image
new[] {
canvasPX, canvasNX,
canvasPY, canvasNY,
canvasPZ, canvasNZ
}.WithEachIndex(
(img, index) =>
{
gl4K.bindTexture(gl.TEXTURE_CUBE_MAP, pass.tex);
//gl.pixelStorei(gl.UNPACK_FLIP_X_WEBGL, false);
gl4K.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
// http://stackoverflow.com/questions/15364517/pixelstoreigl-unpack-flip-y-webgl-true
// https://msdn.microsoft.com/en-us/library/dn302429(v=vs.85).aspx
//gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
//gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);
gl4K.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img.canvas);
}
);
// http://stackoverflow.com/questions/11544608/how-to-clear-a-rectangle-area-in-webgl
if (cameraz.valueAsNumber == 0)
gl4K.clearColor(0, 0, 0, 0);
else
gl4K.clearColor(0, 0, 0, 1);
gl4K.clear(gl.COLOR_BUFFER_BIT);
// could do dynamic resolution- fog of war or fog of FOV. where up to 150deg field of vision is encouragedm, not 360
pass.Paint_Image(
0,
0,
0,
0,
0
//,
// gl_FragCoord
// cannot be scaled, and can be referenced directly.
// need another way to scale
//zoom: 0.3f
);
//paintsw.Stop();
// what does it do?
gl4K.flush();
#endregion
}
// let render man know..
if (vsync1renderman != null)
if (!vsync1renderman.Task.IsCompleted)
vsync1renderman.SetResult(null);
if (vsync0ambient != null)
if (!vsync0ambient.Task.IsCompleted)
vsync0ambient.SetResult(null);
};
}
//);
Console.WriteLine("do you see it?");
}
示例11: Application
//.........这里部分代码省略.........
renderer0.render(scene, cameraNY);
//canvasNY.save();
//canvasNY.translate(size, 0);
//canvasNY.rotate((float)(Math.PI / 2));
canvasNY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
//canvasNY.restore();
// ?
#endregion
//renderer0.render(scene, cameraPX);
//rendererPY.render(scene, cameraPY);
// at this point we should be able to render the sphere texture
//public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
//public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
//public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
//public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
//public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
//public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;
//var cube0 = new IHTMLImage[] {
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
//};
new[] {
canvasPX, canvasNX,
canvasPY, canvasNY,
canvasPZ, canvasNZ
}.WithEachIndex(
(img, index) =>
{
gl.bindTexture(gl.TEXTURE_CUBE_MAP, pass.tex);
//gl.pixelStorei(gl.UNPACK_FLIP_X_WEBGL, false);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
// http://stackoverflow.com/questions/15364517/pixelstoreigl-unpack-flip-y-webgl-true
// https://msdn.microsoft.com/en-us/library/dn302429(v=vs.85).aspx
//gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
//gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);
gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img.canvas);
}
);
// could do dynamic resolution- fog of war or fog of FOV. where up to 150deg field of vision is encouragedm, not 360
pass.Paint_Image(
0,
0,
0,
0,
0
//,
// gl_FragCoord
// cannot be scaled, and can be referenced directly.
// need another way to scale
//zoom: 0.3f
);
//paintsw.Stop();
// what does it do?
gl.flush();
// let render man know..
if (vsync != null)
if (!vsync.Task.IsCompleted)
vsync.SetResult(null);
};
}
);
Console.WriteLine("do you see it?");
}
示例12: Application
//.........这里部分代码省略.........
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.uniform4f(colorLoc, 1, 1, 0, 1);
drawBall(1, 1, 1); drawBall(-1, 1, 1); drawBall(1, -1, 1);
drawBall(1, 1, -1); drawBall(-1, -1, 1); drawBall(-1, 1, -1);
drawBall(1, -1, -1); drawBall(-1, -1, -1);
mvMatrix.load(rotMat);
mvMatrix.translate(0, 0, transl);
gl.uniformMatrix4fv(mvMatLoc, false,
new Float32Array(mvMatrix.getAsArray()));
gl.enable(gl.BLEND);
gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
gl.uniform4f(colorLoc, .0f, .0f, .9f, .7f);
gl.depthMask(false);
gl.drawElements(gl.TRIANGLES, 36, gl.UNSIGNED_SHORT, 0);
gl.depthMask(true);
gl.disable(gl.BLEND);
gl.useProgram(line_prog);
gl.uniformMatrix4fv(gl.getUniformLocation(line_prog, "prMatrix"),
false, new Float32Array(prMatrix.getAsArray()));
gl.uniformMatrix4fv(mvMatLineLoc, false,
new Float32Array(mvMatrix.getAsArray()));
gl.drawArrays(gl.LINES, 0, 24);
gl.flush();
};
#endregion
drawScene();
#region AtResize
Action AtResize = delegate
{
gl_viewportWidth = Native.window.Width;
gl_viewportHeight = Native.window.Height;
prMatrix = new CanvasMatrix4();
//var aspect = (f)gl_viewportWidth / (f)gl_viewportHeight;
var aspect = Native.window.aspect;
Console.WriteLine(
new { gl_viewportWidth, gl_viewportHeight, aspect }
);
//Native.document.title = new { aspect }.ToString();
prMatrix.perspective(45f, (f)aspect, 1f, 100f);
canvas.style.SetLocation(0, 0, gl_viewportWidth, gl_viewportHeight);
canvas.width = gl_viewportWidth;
canvas.height = gl_viewportHeight;
示例13: Application
//.........这里部分代码省略.........
renderer0.render(scene, cameraNY);
//canvasNY.save();
//canvasNY.translate(size, 0);
//canvasNY.rotate((float)(Math.PI / 2));
canvasNY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
//canvasNY.restore();
// ?
#endregion
//renderer0.render(scene, cameraPX);
//rendererPY.render(scene, cameraPY);
// at this point we should be able to render the sphere texture
//public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
//public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
//public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
//public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
//public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
//public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;
//var cube0 = new IHTMLImage[] {
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
// new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
//};
new[] {
canvasPX, canvasNX,
canvasPY, canvasNY,
canvasPZ, canvasNZ
}.WithEachIndex(
(img, index) =>
{
gl.bindTexture(gl.TEXTURE_CUBE_MAP, pass.tex);
//gl.pixelStorei(gl.UNPACK_FLIP_X_WEBGL, false);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
// http://stackoverflow.com/questions/15364517/pixelstoreigl-unpack-flip-y-webgl-true
// https://msdn.microsoft.com/en-us/library/dn302429(v=vs.85).aspx
//gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
//gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);
gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img.canvas);
}
);
// could do dynamic resolution- fog of war or fog of FOV. where up to 150deg field of vision is encouragedm, not 360
pass.Paint_Image(
0,
0,
0,
0,
0
//,
// gl_FragCoord
// cannot be scaled, and can be referenced directly.
// need another way to scale
//zoom: 0.3f
);
//paintsw.Stop();
// what does it do?
gl.flush();
// let render man know..
if (vsync != null)
if (!vsync.Task.IsCompleted)
vsync.SetResult(null);
};
}
);
Console.WriteLine("do you see it?");
}
示例14: Application
//.........这里部分代码省略.........
// Color presets over drawBall(x, y, z, r, g, b, scale).
Action<f, f, f, f> drawBall_white = (px, py, pz, ballScale) => drawBall(px, py, pz, 1, 1, 1, ballScale);
Action<f, f, f, f> drawBall_red = (px, py, pz, ballScale) => drawBall(px, py, pz, 1, 0, 0, ballScale);
#region drawScene
// Renders one frame: applies the mouse rotation accumulated since the
// last frame, then draws the fixed arrangement of spheres.
Action drawScene = delegate
{
gl.viewport(0, 0, gl_viewportWidth, gl_viewportHeight);
#region prMatrix
// Re-upload the projection matrix each frame (it is rebuilt on resize).
gl.uniformMatrix4fv(gl.getUniformLocation(prog, "prMatrix"),
false, new Float32Array(prMatrix.getAsArray()));
#endregion
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Fold the per-frame mouse deltas into the cumulative rotation, then
// zero them so each delta is applied exactly once.
rotMat.rotate(xRot / 3, 1, 0, 0); rotMat.rotate(yRot / 3, 0, 1, 0);
yRot = 0; xRot = 0;
// Argument order appears to be drawBall(x, y, z, r, g, b, scale) —
// TODO confirm against the drawBall definition (not visible here).
drawBall(0, 0, 0, .3f, .3f, .3f, 1.5f);
drawBall(1, 1, 1, .3f, .3f, .3f, 1.5f);
drawBall_white(2, 2, 0, 1);
drawBall_white(2, 0, 2, 1);
drawBall_white(0, 2, 2, 1);
drawBall_white(-1, -1, 1, 1);
drawBall_white(1, -1, -1, 1);
drawBall_red(-1, 1, -1, 1.5f);
drawBall_white(-2, 0, -2, 1);
gl.flush();
};
#endregion
#region mouse
canvas.onmousedown += ev =>
{
ev.PreventDefault();
drag = 1;
xOffs = ev.CursorX;
yOffs = ev.CursorY;
};
canvas.onmouseup += ev =>
{
ev.PreventDefault();
drag = 0;
xOffs = ev.CursorX;
yOffs = ev.CursorY;
};
canvas.onmousemove += ev =>
{
if (drag == 0)
return;
ev.PreventDefault();
if (ev.shiftKey)
{
transl *= 1 + (ev.CursorY - yOffs) / 1000;
示例15: Application
//.........这里部分代码省略.........
gl.clearDepth(1.0f);
gl.clearColor(0, 0, .5f, 1);
var xOffs = 0;
var yOffs = 0;
var drag = 0;
var xRot = 0;
var yRot = 0;
var transl = -1.5f;
// Renders one frame of the triangle-strip mesh with the current
// rotation and zoom.
Action drawScene = delegate
{
// Upload the current projection matrix (rebuilt on resize).
gl.uniformMatrix4fv(gl.getUniformLocation(prog, "prMatrix"),
false, new Float32Array(prMatrix.getAsArray()));
gl.viewport(0, 0, gl_viewportWidth, gl_viewportHeight);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Apply the mouse deltas accumulated since the last frame, then reset
// them so a delta is never applied twice.
rotMat.rotate(xRot / 5, 1, 0, 0);
rotMat.rotate(yRot / 5, 0, 1, 0);
yRot = 0;
xRot = 0;
// Model-view = accumulated rotation followed by a z translation (zoom).
mvMatrix.load(rotMat);
mvMatrix.translate(0, 0, transl);
gl.uniformMatrix4fv(mvMatLoc, false,
new Float32Array(mvMatrix.getAsArray()));
// 2 * nPhi vertices drawn as a single triangle strip.
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 2 * nPhi);
gl.flush();
};
#region IsDisposed
// One-shot teardown: detach the canvas from the document at most once,
// no matter how many times Dispose is invoked.
var IsDisposed = false;
this.Dispose = delegate
{
    if (!IsDisposed)
    {
        IsDisposed = true;
        canvas.Orphanize();
    }
};
#endregion
#region requestFullscreen
// Double-click anywhere on the page toggles fullscreen.
Native.Document.body.ondblclick +=
delegate
{
// Ignore double-clicks after teardown.
if (IsDisposed)
return;
// http://tutorialzine.com/2012/02/enhance-your-website-fullscreen-api/
Native.Document.body.requestFullscreen();
};