This article collects typical usage examples of the GLES20.glClear method from the Java class android.opengl.GLES20. If you are unsure what GLES20.glClear does, how to call it, or what real-world code that uses it looks like, the curated method examples below should help. You can also explore the other methods of the android.opengl.GLES20 class.
The following 12 code examples of GLES20.glClear are ordered by popularity.
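Before the examples, here is a minimal, self-contained sketch of the most common pattern: clearing the color buffer at the top of GLSurfaceView.Renderer.onDrawFrame(). It is not taken from any of the projects below; the class name ClearRenderer and the chosen clear color are illustrative.

import android.opengl.GLES20;
import android.opengl.GLSurfaceView;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class ClearRenderer implements GLSurfaceView.Renderer {

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        // The clear color is GL state: set it once here (or whenever it changes).
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        // glClear() fills the selected buffers with their current clear values;
        // the color buffer is filled with the color set by glClearColor() above.
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        // ... draw the rest of the scene here ...
    }
}

A GLSurfaceView in RENDERMODE_CONTINUOUSLY calls onDrawFrame() for every frame, so clearing at the top prevents leftovers from the previous frame from showing through.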
Example 1: encodeTextureBuffer
import android.opengl.GLES20; // import the package/class this method depends on
private VideoCodecStatus encodeTextureBuffer(VideoFrame videoFrame) {
    encodeThreadChecker.checkIsOnValidThread();
    try {
        // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
        // but it's a workaround for bug webrtc:5147.
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        // It is not necessary to release this frame because it doesn't own the buffer.
        VideoFrame derotatedFrame =
                new VideoFrame(videoFrame.getBuffer(), 0 /* rotation */, videoFrame.getTimestampNs());
        videoFrameDrawer.drawFrame(derotatedFrame, textureDrawer, null /* additionalRenderMatrix */);
        textureEglBase.swapBuffers(videoFrame.getTimestampNs());
    } catch (RuntimeException e) {
        Logging.e(TAG, "encodeTexture failed", e);
        return VideoCodecStatus.ERROR;
    }
    return VideoCodecStatus.OK;
}
Example 2: onDrawFrame
import android.opengl.GLES20; // import the package/class this method depends on
@Override
public void onDrawFrame(GL10 gl) {
    if (width == 0 || height == 0) {
        return;
    }
    SystemTime.tick();
    long rightNow = SystemClock.elapsedRealtime();
    step = (now == 0 ? 0 : rightNow - now);
    now = rightNow;
    step();
    NoosaScript.get().resetCamera();
    NoosaScriptNoLighting.get().resetCamera();
    GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    draw();
}
Example 3: generateFrame
import android.opengl.GLES20; // import the package/class this method depends on
/**
 * Generates a frame of data using GL commands.
 */
private void generateFrame(int frameIndex) {
    final int BOX_SIZE = 80;
    frameIndex %= 240;
    int xpos, ypos;
    int absIndex = Math.abs(frameIndex - 120);
    xpos = absIndex * WIDTH / 120;
    ypos = absIndex * HEIGHT / 120;
    float lumaf = absIndex / 120.0f;
    GLES20.glClearColor(lumaf, lumaf, lumaf, 1.0f);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
    GLES20.glScissor(BOX_SIZE / 2, ypos, BOX_SIZE, BOX_SIZE);
    GLES20.glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    GLES20.glScissor(xpos, BOX_SIZE / 2, BOX_SIZE, BOX_SIZE);
    GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
}
Example 4: onDrawFrame
import android.opengl.GLES20; // import the package/class this method depends on
@Override
public void onDrawFrame(GL10 gl) {
    //Log.i(LOGTAG, "onDrawFrame start");
    if (!mHaveFBO)
        return;
    synchronized (this) {
        if (mUpdateST) {
            mSTexture.updateTexImage();
            mUpdateST = false;
        }
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        CameraTextureListener texListener = mView.getCameraTextureListener();
        if (texListener != null) {
            //Log.d(LOGTAG, "haveUserCallback");
            // texCamera(OES) -> texFBO
            drawTex(texCamera[0], true, FBO[0]);
            // call user code (texFBO -> texDraw)
            boolean modified = texListener.onCameraTexture(texFBO[0], texDraw[0], mCameraWidth, mCameraHeight);
            if (modified) {
                // texDraw -> screen
                drawTex(texDraw[0], false, 0);
            } else {
                // texFBO -> screen
                drawTex(texFBO[0], false, 0);
            }
        } else {
            Log.d(LOGTAG, "texCamera(OES) -> screen");
            // texCamera(OES) -> screen
            drawTex(texCamera[0], true, 0);
        }
        //Log.i(LOGTAG, "onDrawFrame end");
    }
}
Example 5: onDrawFrame
import android.opengl.GLES20; // import the package/class this method depends on
/**
 * Called every time a frame is drawn.
 * Always draw something in this method, even if it is only clearing the screen.
 * TODO test. Reason: after this method returns, the render buffer is swapped and shown on
 * screen; if nothing was drawn, you may see ugly flickering.
 * @param gl
 */
@Override
public void onDrawFrame(GL10 gl) {
    // Clear the screen: erase everything and fill it with the color set by the earlier glClearColor() call.
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    GlUtil.checkGlError("glClear");
    // glUniformMatrix4fv(int location, int count, boolean transpose, float[] value, int offset):
    // pass the orthographic projection matrix to the shader.
    GLES20.glUniformMatrix4fv(uMatrixLocation, 1, false, projectionMatrix, 0);
    // 3.5 Draw on the screen.
    // Update the value of u_Color in the shader. Unlike attributes, uniforms have no default value.
    // GLES20.glUniform4f(aColorLocation, 1.0f, 1.0f, 1.0f, 1.0f);
    GlUtil.checkGlError("glUniform4f");
    // Draw the triangle fan: 6 vertices starting at vertex 0.
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_FAN, 0, 6);
    GlUtil.checkGlError("glDrawArrays");
    // GLES20.glUniform4f(aColorLocation, 1.0f, 0.0f, 0.0f, 1.0f);
    GlUtil.checkGlError("glUniform4f");
    // Draw a line: 2 vertices starting at vertex 6.
    GLES20.glDrawArrays(GLES20.GL_LINES, 6, 2);
    GlUtil.checkGlError("glDrawArrays");
    // GLES20.glUniform4f(aColorLocation, 0.0f, 0.0f, 1.0f, 1.0f);
    // Draw a point: 1 vertex at vertex 8.
    GLES20.glDrawArrays(GLES20.GL_POINTS, 8, 1);
    // GLES20.glUniform4f(aColorLocation, 1.0f, 0.0f, 0.0f, 1.0f);
    // Draw a point: 1 vertex at vertex 9.
    GLES20.glDrawArrays(GLES20.GL_POINTS, 9, 1);
}
Example 6: drawTex
import android.opengl.GLES20; // import the package/class this method depends on
private void drawTex(int tex, boolean isOES, int fbo)
{
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo);
    if (fbo == 0)
        GLES20.glViewport(0, 0, mView.getWidth(), mView.getHeight());
    else
        GLES20.glViewport(0, 0, mFBOWidth, mFBOHeight);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    if (isOES) {
        GLES20.glUseProgram(progOES);
        GLES20.glVertexAttribPointer(vPosOES, 2, GLES20.GL_FLOAT, false, 4*2, vert);
        GLES20.glVertexAttribPointer(vTCOES, 2, GLES20.GL_FLOAT, false, 4*2, texOES);
    } else {
        GLES20.glUseProgram(prog2D);
        GLES20.glVertexAttribPointer(vPos2D, 2, GLES20.GL_FLOAT, false, 4*2, vert);
        GLES20.glVertexAttribPointer(vTC2D, 2, GLES20.GL_FLOAT, false, 4*2, tex2D);
    }
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    if (isOES) {
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex);
        GLES20.glUniform1i(GLES20.glGetUniformLocation(progOES, "sTexture"), 0);
    } else {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex);
        GLES20.glUniform1i(GLES20.glGetUniformLocation(prog2D, "sTexture"), 0);
    }
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    GLES20.glFlush();
}
Example 7: onDrawFrame
import android.opengl.GLES20; // import the package/class this method depends on
@Override
public void onDrawFrame(GL10 unused) {
    float[] scratch = new float[16];
    // Draw background color
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    // Set the camera position (View matrix)
    Matrix.setLookAtM(mViewMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
    // Calculate the projection and view transformation
    Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0);
    // Draw square
    mSquare.draw(mMVPMatrix);
    // Create a rotation for the triangle
    // Use the following code to generate constant rotation.
    // Leave this code out when using TouchEvents.
    // long time = SystemClock.uptimeMillis() % 4000L;
    // float angle = 0.090f * ((int) time);
    Matrix.setRotateM(mRotationMatrix, 0, mAngle, 0, 0, 1.0f);
    // Combine the rotation matrix with the projection and camera view
    // Note that the mMVPMatrix factor *must be first* in order
    // for the matrix multiplication product to be correct.
    Matrix.multiplyMM(scratch, 0, mMVPMatrix, 0, mRotationMatrix, 0);
    // Draw triangle
    mTriangle.draw(scratch);
}
Example 8: glClear
import android.opengl.GLES20; // import the package/class this method depends on
public static void glClear(final int aClearMask)
{
    //.if DESKTOP
    //|gl.glClear(aClearMask);
    //.elseif ANDROID
    GLES20.glClear(aClearMask);
    //.endif
}
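The wrapper above only forwards the clear mask, so callers combine the buffer bits themselves. A hypothetical call site might look like the following (the enclosing class of the wrapper is not shown in the snippet, so the qualifier is omitted here):

// Clear color, depth and stencil in a single call through the wrapper;
// the bits are OR-ed together exactly as they would be for GLES20.glClear().
glClear(GLES20.GL_COLOR_BUFFER_BIT
        | GLES20.GL_DEPTH_BUFFER_BIT
        | GLES20.GL_STENCIL_BUFFER_BIT);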
Example 9: clearSurface
import android.opengl.GLES20; // import the package/class this method depends on
/**
 * Clear data from the surface using OpenGL.
 */
private void clearSurface(Object texture) {
    EGL10 egl = (EGL10) EGLContext.getEGL();
    EGLDisplay display = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
    egl.eglInitialize(display, null);

    int[] attribList = {
            EGL10.EGL_RED_SIZE, 8, EGL10.EGL_GREEN_SIZE, 8, EGL10.EGL_BLUE_SIZE, 8,
            EGL10.EGL_ALPHA_SIZE, 8, EGL10.EGL_RENDERABLE_TYPE, EGL10.EGL_WINDOW_BIT, EGL10.EGL_NONE, 0,
            // placeholder for recordable [@-3]
            EGL10.EGL_NONE
    };
    EGLConfig[] configs = new EGLConfig[1];
    int[] numConfigs = new int[1];
    egl.eglChooseConfig(display, attribList, configs, configs.length, numConfigs);
    EGLConfig config = configs[0];
    EGLContext context = egl.eglCreateContext(display, config, EGL10.EGL_NO_CONTEXT, new int[] {
            12440, 2, EGL10.EGL_NONE // 12440 = EGL_CONTEXT_CLIENT_VERSION; request an ES 2.0 context
    });
    EGLSurface eglSurface = egl.eglCreateWindowSurface(display, config, texture, new int[] {
            EGL10.EGL_NONE
    });

    egl.eglMakeCurrent(display, eglSurface, eglSurface, context);
    GLES20.glClearColor(0, 0, 0, 1);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    egl.eglSwapBuffers(display, eglSurface);

    egl.eglDestroySurface(display, eglSurface);
    egl.eglMakeCurrent(display, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
    egl.eglDestroyContext(display, context);
    egl.eglTerminate(display);
}
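clearSurface() spins up a throwaway EGL context just long enough to clear and swap, which is useful when no renderer currently owns the surface. A hypothetical call site, assuming the Object parameter is a SurfaceTexture (one of the window types eglCreateWindowSurface accepts), could be:

// For example, blank a TextureView after video playback stops so the last frame
// does not linger on screen. textureView is a hypothetical android.view.TextureView.
SurfaceTexture surfaceTexture = textureView.getSurfaceTexture();
if (surfaceTexture != null) {
    clearSurface(surfaceTexture);
}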
Example 10: draw
import android.opengl.GLES20; // import the package/class this method depends on
public void draw() {
    if (mBlendng) {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    } else {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    }

    // Do a complete rotation every 10 seconds.
    long time = SystemClock.uptimeMillis() % 10000L;
    float angleInDegrees = (360.0f / 10000.0f) * ((int) time);

    // Set our per-vertex lighting program.
    GLES20.glUseProgram(mProgramHandle);

    // Set program handles for cube drawing.
    mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_MVPMatrix");
    mPositionHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_Position");
    mColorHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_Color");

    // right
    Matrix.setIdentityM(mModelMatrix, 0);
    Matrix.translateM(mModelMatrix, 0, 4.0f, 0.0f, -7.0f);
    Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 1.0f, 0.0f, 0.0f);
    drawCube();

    // left
    Matrix.setIdentityM(mModelMatrix, 0);
    Matrix.translateM(mModelMatrix, 0, -4.0f, 0.0f, -7.0f);
    Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 1.0f, 0.0f);
    drawCube();

    // top
    Matrix.setIdentityM(mModelMatrix, 0);
    Matrix.translateM(mModelMatrix, 0, 0.0f, 4.0f, -7.0f);
    Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
    drawCube();

    // bottom
    Matrix.setIdentityM(mModelMatrix, 0);
    Matrix.translateM(mModelMatrix, 0, 0.0f, -4.0f, -7.0f);
    Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 1.0f, 0.0f);
    drawCube();

    // center
    Matrix.setIdentityM(mModelMatrix, 0);
    Matrix.translateM(mModelMatrix, 0, 0.0f, 0.0f, -5.0f);
    Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 1.0f, 1.0f, 1.0f);
    drawCube();
}
Example 11: run
import android.opengl.GLES20; // import the package/class this method depends on
@Override
public void run() {
    initGL(surfaceTexture);

    // Setup camera filters map
    cameraFilterMap.append(R.string.filter0, new OriginalFilter(context));
    cameraFilterMap.append(R.string.filter1, new AsciiArtFilter(context));
    cameraFilterMap.append(R.string.filter2, new BasicDeformFilter(context));
    cameraFilterMap.append(R.string.filter3, new BlueorangeFilter(context));
    cameraFilterMap.append(R.string.filter4, new ChromaticAberrationFilter(context));
    cameraFilterMap.append(R.string.filter5, new ContrastFilter(context));
    cameraFilterMap.append(R.string.filter6, new CrackedFilter(context));
    cameraFilterMap.append(R.string.filter7, new CrosshatchFilter(context));
    cameraFilterMap.append(R.string.filter8, new EdgeDetectionFilter(context));
    cameraFilterMap.append(R.string.filter9, new EMInterferenceFilter(context));
    cameraFilterMap.append(R.string.filter10, new LegofiedFilter(context));
    cameraFilterMap.append(R.string.filter11, new LichtensteinEsqueFilter(context));
    cameraFilterMap.append(R.string.filter12, new MappingFilter(context));
    cameraFilterMap.append(R.string.filter13, new MoneyFilter(context));
    cameraFilterMap.append(R.string.filter14, new NoiseWarpFilter(context));
    cameraFilterMap.append(R.string.filter15, new PixelizeFilter(context));
    cameraFilterMap.append(R.string.filter16, new PolygonizationFilter(context));
    cameraFilterMap.append(R.string.filter17, new RefractionFilter(context));
    cameraFilterMap.append(R.string.filter18, new TileMosaicFilter(context));
    cameraFilterMap.append(R.string.filter19, new TrianglesMosaicFilter(context));
    cameraFilterMap.append(R.string.filter20, new BeautyFilter(context));
    setSelectedFilter(selectedFilterId);

    // Create texture for camera preview
    // cameraTextureId = MyGLUtils.genTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
    // cameraSurfaceTexture = new SurfaceTexture(cameraTextureId);

    // Start camera preview
    // try {
    //     camera.setPreviewTexture(cameraSurfaceTexture);
    //     camera.startPreview();
    // } catch (IOException ioe) {
    //     // Something bad happened
    // }

    // Render loop
    while (!Thread.currentThread().isInterrupted()) {
        try {
            if (gwidth < 0 && gheight < 0)
                GLES20.glViewport(0, 0, gwidth = -gwidth, gheight = -gheight);
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

            // Update the camera preview texture
            synchronized (this) {
                cameraSurfaceTexture.updateTexImage();
            }

            // Draw camera preview
            selectedFilter.draw(cameraTextureId, gwidth, gheight);

            // Flush
            GLES20.glFlush();
            egl10.eglSwapBuffers(eglDisplay, eglSurface);

            Thread.sleep(DRAW_INTERVAL);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }

    cameraSurfaceTexture.release();
    GLES20.glDeleteTextures(1, new int[]{cameraTextureId}, 0);
}
Example 12: testLateReturnFrame
import android.opengl.GLES20; // import the package/class this method depends on
/**
 * Test disposing the SurfaceTextureHelper while holding a pending texture frame. The pending
 * texture frame should still be valid, and this is tested by drawing the texture frame to a pixel
 * buffer and reading it back with glReadPixels().
 */
@Test
@MediumTest
public void testLateReturnFrame() throws InterruptedException {
    final int width = 16;
    final int height = 16;
    // Create EGL base with a pixel buffer as display output.
    final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
    eglBase.createPbufferSurface(width, height);
    // Create SurfaceTextureHelper and listener.
    final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
            "SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
    final MockTextureListener listener = new MockTextureListener();
    surfaceTextureHelper.startListening(listener);
    surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
    // Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
    // |surfaceTextureHelper| as the target EGLSurface.
    final EglBase eglOesBase = EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
    eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
    assertEquals(eglOesBase.surfaceWidth(), width);
    assertEquals(eglOesBase.surfaceHeight(), height);
    final int red = 79;
    final int green = 66;
    final int blue = 161;
    // Draw a constant color frame onto the SurfaceTexture.
    eglOesBase.makeCurrent();
    GLES20.glClearColor(red / 255.0f, green / 255.0f, blue / 255.0f, 1.0f);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    // swapBuffers() will ultimately trigger onTextureFrameAvailable().
    eglOesBase.swapBuffers();
    eglOesBase.release();
    // Wait for OES texture frame.
    listener.waitForNewFrame();
    // Disconnect while holding the frame.
    surfaceTextureHelper.dispose();
    // Draw the pending texture frame onto the pixel buffer.
    eglBase.makeCurrent();
    final GlRectDrawer drawer = new GlRectDrawer();
    drawer.drawOes(
            listener.oesTextureId, listener.transformMatrix, width, height, 0, 0, width, height);
    drawer.release();
    // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
    final ByteBuffer rgbaData = ByteBuffer.allocateDirect(width * height * 4);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
    GlUtil.checkNoGLES2Error("glReadPixels");
    eglBase.release();
    // Assert rendered image is expected constant color.
    while (rgbaData.hasRemaining()) {
        assertEquals(rgbaData.get() & 0xFF, red);
        assertEquals(rgbaData.get() & 0xFF, green);
        assertEquals(rgbaData.get() & 0xFF, blue);
        assertEquals(rgbaData.get() & 0xFF, 255);
    }
    // Late frame return after everything has been disposed and released.
    surfaceTextureHelper.returnTextureFrame();
}