This article collects typical usage examples of the Java method android.opengl.GLES20.glGetIntegerv. If you have been wondering what GLES20.glGetIntegerv does, how to use it, or what real-world calls look like, the curated code samples below may help. You can also explore further usage examples of its containing class, android.opengl.GLES20.
The following shows 6 code examples of GLES20.glGetIntegerv, sorted by popularity by default. Upvoting the examples you like or find useful helps the system recommend better Java code samples.
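Before the examples, here is a minimal sketch of the method itself (assuming a valid GLES 2.0 context is current on the calling thread): glGetIntegerv writes the queried GL state into an int array starting at the given offset.

// Minimal sketch: query integer GL state (requires a current GLES 2.0 context on this thread).
int[] maxTextureSize = new int[1];
GLES20.glGetIntegerv(GLES20.GL_MAX_TEXTURE_SIZE, maxTextureSize, 0);

int[] viewport = new int[4]; // x, y, width, height
GLES20.glGetIntegerv(GLES20.GL_VIEWPORT, viewport, 0);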
Example 1: draw
import android.opengl.GLES20; // import the package/class this method depends on
@Override
public void draw(int texture) {
    onTaskExec();
    boolean isBlend = GLES20.glIsEnabled(GLES20.GL_BLEND);
    GLES20.glDisable(GLES20.GL_BLEND);
    GLES20.glGetIntegerv(GLES20.GL_VIEWPORT, lastViewPort, 0);
    GLES20.glViewport(0, 0, mWidth, mHeight);
    if (mScaleFilter != null) {
        mExportFilter.draw(mScaleFilter.drawToTexture(texture));
    } else {
        mExportFilter.draw(texture);
    }
    GLES20.glReadPixels(0, 0, mWidth, mHeight * 3 / 8, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mTempBuffer);
    GLES20.glViewport(lastViewPort[0], lastViewPort[1], lastViewPort[2], lastViewPort[3]);
    if (isBlend) {
        GLES20.glEnable(GLES20.GL_BLEND);
    }
}
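In Example 1 the blend flag is read with glIsEnabled and the viewport is saved with glGetIntegerv(GL_VIEWPORT, ...); both are restored after the off-screen read-back. Boolean state such as GL_BLEND can also be queried through glGetIntegerv, which returns 0 or 1. A small sketch, assuming a current GLES 2.0 context:

// Alternative to glIsEnabled: boolean state read back as 0 or 1 via glGetIntegerv.
int[] blend = new int[1];
GLES20.glGetIntegerv(GLES20.GL_BLEND, blend, 0);
boolean isBlendEnabled = blend[0] != 0;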
Example 2: createFrameBuffer
import android.opengl.GLES20; // import the package/class this method depends on
/**
 * Creates a FrameBuffer.
 * @param hasRenderBuffer whether to attach a depth RenderBuffer
 * @param width width
 * @param height height
 * @param texType texture target, usually {@link GLES20#GL_TEXTURE_2D}
 * @param texFormat texture format, usually {@link GLES20#GL_RGBA}, {@link GLES20#GL_RGB}, etc.
 * @param minParams texture minification filter parameter
 * @param maxParams texture magnification filter parameter
 * @param wrapS texture wrap parameter in the S direction
 * @param wrapT texture wrap parameter in the T direction
 * @return creation result: 0 (GL_NO_ERROR) on success, otherwise a GL error code
 */
public int createFrameBuffer(boolean hasRenderBuffer, int width, int height, int texType, int texFormat,
                             int minParams, int maxParams, int wrapS, int wrapT) {
    mFrameTemp = new int[4];
    GLES20.glGenFramebuffers(1, mFrameTemp, 0);
    GLES20.glGenTextures(1, mFrameTemp, 1);
    GLES20.glBindTexture(texType, mFrameTemp[1]);
    GLES20.glTexImage2D(texType, 0, texFormat, width, height,
            0, texFormat, GLES20.GL_UNSIGNED_BYTE, null);
    // minification filter: e.g. GL_NEAREST uses the color of the texel closest to the texture coordinate
    GLES20.glTexParameteri(texType, GLES20.GL_TEXTURE_MIN_FILTER, minParams);
    // magnification filter: e.g. GL_LINEAR blends the nearest texels with a weighted average
    GLES20.glTexParameteri(texType, GLES20.GL_TEXTURE_MAG_FILTER, maxParams);
    // wrap mode in the S direction: e.g. GL_CLAMP_TO_EDGE clamps coordinates to [1/2n, 1-1/2n], so the border is never sampled
    GLES20.glTexParameteri(texType, GLES20.GL_TEXTURE_WRAP_S, wrapS);
    // wrap mode in the T direction: e.g. GL_CLAMP_TO_EDGE clamps coordinates to [1/2n, 1-1/2n], so the border is never sampled
    GLES20.glTexParameteri(texType, GLES20.GL_TEXTURE_WRAP_T, wrapT);
    GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, mFrameTemp, 3);
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameTemp[0]);
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
            texType, mFrameTemp[1], 0);
    if (hasRenderBuffer) {
        GLES20.glGenRenderbuffers(1, mFrameTemp, 2);
        GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, mFrameTemp[2]);
        GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, width, height);
        GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, GLES20.GL_RENDERBUFFER, mFrameTemp[2]);
    }
    return GLES20.glGetError();
}
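A possible call to createFrameBuffer, assuming it is exposed by a helper instance named frameBuffer (a hypothetical name) and that a GLES context is current:

// Hypothetical usage: 1280x720 RGBA color texture plus a 16-bit depth renderbuffer.
int error = frameBuffer.createFrameBuffer(true, 1280, 720,
        GLES20.GL_TEXTURE_2D, GLES20.GL_RGBA,
        GLES20.GL_NEAREST, GLES20.GL_LINEAR,
        GLES20.GL_CLAMP_TO_EDGE, GLES20.GL_CLAMP_TO_EDGE);
if (error != GLES20.GL_NO_ERROR) {
    // creation failed; error holds the GL error code
}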
Example 3: onDraw
import android.opengl.GLES20; // import the package/class this method depends on
@Override
protected void onDraw() {
    //todo change blend and viewport
    super.onDraw();
    if (markTextureId != -1) {
        GLES20.glGetIntegerv(GLES20.GL_VIEWPORT, viewPort, 0);
        GLES20.glViewport(markPort[0], mHeight - markPort[3] - markPort[1], markPort[2], markPort[3]);
        GLES20.glEnable(GLES20.GL_BLEND);
        GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
        GLES20.glBlendEquation(GLES20.GL_FUNC_ADD);
        mark.draw(markTextureId);
        GLES20.glDisable(GLES20.GL_BLEND);
        GLES20.glViewport(viewPort[0], viewPort[1], viewPort[2], viewPort[3]);
    }
    //todo reset blend and view port
}
Example 4: bindFrameBuffer
import android.opengl.GLES20; // import the package/class this method depends on
/**
 * Binds the FrameBuffer. This method may only be called after a FrameBuffer has previously been created.
 * @return -1 if no FrameBuffer has been created; otherwise the GL error code after binding (0 / GL_NO_ERROR on success)
 */
public int bindFrameBuffer() {
    if (mFrameTemp == null) {
        return -1;
    }
    GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, mFrameTemp, 3);
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameTemp[0]);
    return GLES20.glGetError();
}
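The glGetIntegerv(GL_FRAMEBUFFER_BINDING, mFrameTemp, 3) call records whichever framebuffer was bound beforehand, so a matching unbind can restore it. Example 6 below calls such an unBindFrameBuffer method; its body is not shown in this article, but it presumably looks something like this sketch:

// Assumed counterpart (not shown in the source): restore the binding saved at mFrameTemp[3].
public void unBindFrameBuffer() {
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameTemp[3]);
}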
Example 5: getMaxTextureEgl14
import android.opengl.GLES20; // import the package/class this method depends on
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
private static int getMaxTextureEgl14() {
    EGLDisplay dpy = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
    int[] vers = new int[2];
    EGL14.eglInitialize(dpy, vers, 0, vers, 1);
    int[] configAttr = {
            EGL14.EGL_COLOR_BUFFER_TYPE, EGL14.EGL_RGB_BUFFER,
            EGL14.EGL_LEVEL, 0,
            EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
            EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT,
            EGL14.EGL_NONE
    };
    EGLConfig[] configs = new EGLConfig[1];
    int[] numConfig = new int[1];
    EGL14.eglChooseConfig(dpy, configAttr, 0,
            configs, 0, 1, numConfig, 0);
    if (numConfig[0] == 0) {
        return 0;
    }
    EGLConfig config = configs[0];
    int[] surfAttr = {
            EGL14.EGL_WIDTH, 64,
            EGL14.EGL_HEIGHT, 64,
            EGL14.EGL_NONE
    };
    EGLSurface surf = EGL14.eglCreatePbufferSurface(dpy, config, surfAttr, 0);
    int[] ctxAttrib = {
            EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
            EGL14.EGL_NONE
    };
    EGLContext ctx = EGL14.eglCreateContext(dpy, config, EGL14.EGL_NO_CONTEXT, ctxAttrib, 0);
    EGL14.eglMakeCurrent(dpy, surf, surf, ctx);
    int[] maxSize = new int[1];
    GLES20.glGetIntegerv(GLES20.GL_MAX_TEXTURE_SIZE, maxSize, 0);
    EGL14.eglMakeCurrent(dpy, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
            EGL14.EGL_NO_CONTEXT);
    EGL14.eglDestroySurface(dpy, surf);
    EGL14.eglDestroyContext(dpy, ctx);
    EGL14.eglTerminate(dpy);
    return maxSize[0];
}
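getMaxTextureEgl14 spins up a throw-away EGL pbuffer context just to read GL_MAX_TEXTURE_SIZE, which is useful before any rendering context exists, for example to decide whether an image must be downscaled before upload. A hedged usage sketch (bitmap is a hypothetical android.graphics.Bitmap):

// Hypothetical caller: avoid uploading a bitmap larger than the device's texture limit.
int maxSize = getMaxTextureEgl14();
if (maxSize > 0 && (bitmap.getWidth() > maxSize || bitmap.getHeight() > maxSize)) {
    // scale the bitmap down before handing it to GLUtils.texImage2D / glTexImage2D
}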
Example 6: run
import android.opengl.GLES20; // import the package/class this method depends on
@Override
public void run() {
    if (mOutputSurface == null) {
        AvLog.d("CameraRecorder GLThread exit : outputSurface==null");
        return;
    }
    if (mPreviewWidth <= 0 || mPreviewHeight <= 0) {
        AvLog.d("CameraRecorder GLThread exit : Preview Size==0");
        return;
    }
    boolean ret = mShowEGLHelper.createGLESWithSurface(new EGLConfigAttrs(), new EGLContextAttrs(), mOutputSurface);
    if (!ret) {
        AvLog.d("CameraRecorder GLThread exit : createGLES failed");
        return;
    }
    if (mRenderer == null) {
        mRenderer = new WrapRenderer(null);
    }
    mRenderer.setFlag(WrapRenderer.TYPE_CAMERA);
    mRenderer.create();
    int[] t = new int[1];
    GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, t, 0);
    mRenderer.sizeChanged(mPreviewWidth, mPreviewHeight);
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, t[0]);
    BaseFilter mShowFilter = new LazyFilter();
    BaseFilter mRecFilter = new LazyFilter();
    MatrixUtils.flip(mShowFilter.getVertexMatrix(), false, true);
    mShowFilter.create();
    mShowFilter.sizeChanged(mPreviewWidth, mPreviewHeight);
    MatrixUtils.getMatrix(mRecFilter.getVertexMatrix(), MatrixUtils.TYPE_CENTERCROP,
            mPreviewWidth, mPreviewHeight,
            mOutputWidth, mOutputHeight);
    MatrixUtils.flip(mRecFilter.getVertexMatrix(), false, true);
    mRecFilter.create();
    mRecFilter.sizeChanged(mOutputWidth, mOutputHeight);
    FrameBuffer mEncodeFrameBuffer = new FrameBuffer();
    while (mGLThreadFlag) {
        try {
            mSem.acquire();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        if (mGLThreadFlag) {
            long time = (System.currentTimeMillis() - BASE_TIME) * 1000;
            mInputTexture.updateTexImage();
            mInputTexture.getTransformMatrix(mRenderer.getTextureMatrix());
            synchronized (VIDEO_LOCK) {
                if (isRecordVideoStarted) {
                    if (mEGLEncodeSurface == null) {
                        mEGLEncodeSurface = mShowEGLHelper.createWindowSurface(mEncodeSurface);
                    }
                    mShowEGLHelper.makeCurrent(mEGLEncodeSurface);
                    mEncodeFrameBuffer.bindFrameBuffer(mPreviewWidth, mPreviewHeight);
                    mRenderer.draw(mInputTextureId);
                    mEncodeFrameBuffer.unBindFrameBuffer();
                    GLES20.glViewport(0, 0, mConfig.getVideoFormat().getInteger(MediaFormat.KEY_WIDTH),
                            mConfig.getVideoFormat().getInteger(MediaFormat.KEY_HEIGHT));
                    mRecFilter.draw(mEncodeFrameBuffer.getCacheTextureId());
                    mShowEGLHelper.setPresentationTime(mEGLEncodeSurface, time * 1000);
                    videoEncodeStep(false);
                    mShowEGLHelper.swapBuffers(mEGLEncodeSurface);
                    mShowEGLHelper.makeCurrent();
                    GLES20.glViewport(0, 0, mPreviewWidth, mPreviewHeight);
                    mShowFilter.draw(mEncodeFrameBuffer.getCacheTextureId());
                    mShowEGLHelper.setPresentationTime(mShowEGLHelper.getDefaultSurface(), 0);
                    mShowEGLHelper.swapBuffers(mShowEGLHelper.getDefaultSurface());
                } else {
                    GLES20.glViewport(0, 0, mPreviewWidth, mPreviewHeight);
                    mRenderer.draw(mInputTextureId);
                    mShowEGLHelper.swapBuffers(mShowEGLHelper.getDefaultSurface());
                }
            }
        }
    }
    mShowEGLHelper.destroyGLES(mShowEGLHelper.getDefaultSurface(), mShowEGLHelper.getDefaultContext());
}