本文整理汇总了Java中com.jme3.texture.FrameBuffer.setDepthBuffer方法的典型用法代码示例。如果您正苦于以下问题:Java FrameBuffer.setDepthBuffer方法的具体用法?Java FrameBuffer.setDepthBuffer怎么用?Java FrameBuffer.setDepthBuffer使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类com.jme3.texture.FrameBuffer
的用法示例。
在下文中一共展示了FrameBuffer.setDepthBuffer方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: ImageCapturer
import com.jme3.texture.FrameBuffer; //导入方法依赖的package包/类
/**
 * Captures frames from the given camera by rendering the scene into an
 * offscreen framebuffer through a dedicated pre-view.
 *
 * @param cam           camera whose output is captured
 * @param renderManager render manager used to create the recorder pre-view
 * @param camNode       node the camera belongs to (stored for later use)
 * @param rootNode      scene attached to the capture viewport
 */
public ImageCapturer(Camera cam, RenderManager renderManager, Node camNode, Node rootNode) {
    this.cam = cam;
    this.renderManager = renderManager;
    this.camNode = camNode;

    // CPU-side buffers sized for one RGBA frame (4 bytes per pixel).
    bbuf = BufferUtils.createByteBuffer(cam.getWidth() * cam.getHeight() * 4);
    image = new BufferedImage(cam.getWidth(), cam.getHeight(), IMAGE_TYPE);

    vp = renderManager.createPreView(cam.getName() + " recorder", cam);
    vp.setBackgroundColor(ColorRGBA.Black);
    vp.setClearFlags(true, true, true);

    // Offscreen target: RGBA8 color renderbuffer plus a depth renderbuffer.
    fbuf = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1);
    fbuf.setDepthBuffer(Format.Depth);
    fbuf.setColorBuffer(Format.RGBA8);
    vp.setOutputFrameBuffer(fbuf);
    vp.attachScene(rootNode);
}
示例2: JfxRenderer
import com.jme3.texture.FrameBuffer; //导入方法依赖的package包/类
/**
 * Creates a renderer that copies jME frames into a JavaFX {@link ImageView}.
 * When {@code depthBuffer} is true an offscreen BGRA8 framebuffer with a
 * depth attachment is created; otherwise no framebuffer is allocated.
 */
public JfxRenderer(ImageView imageView, int width, int height, boolean depthBuffer) {
    this.width = width;
    this.height = height;
    this.imageView = imageView;

    // One scanline is width pixels at 4 bytes each (BGRA).
    this.scanlineStride = width * 4;
    this.byteBuffer = BufferUtils.createByteBuffer(height * scanlineStride);

    this.renderImage = new WritableImage(width, height);
    this.imageView.setImage(renderImage);

    if (!depthBuffer) {
        frameBuffer = null;
    } else {
        frameBuffer = new FrameBuffer(width, height, 1);
        frameBuffer.setDepthBuffer(Format.Depth);
        frameBuffer.setColorBuffer(Format.BGRA8);
    }
}
示例3: simpleInitApp
import com.jme3.texture.FrameBuffer; //导入方法依赖的package包/类
/**
 * Sets up an offscreen framebuffer with a color texture and depth
 * renderbuffer, renders a red sphere into it, and displays the resulting
 * texture on a full-screen picture in the main scene.
 */
@Override
public void simpleInitApp() {
    final int width = settings.getWidth();
    final int height = settings.getHeight();

    // Offscreen target: one color texture plus a depth renderbuffer.
    fb = new FrameBuffer(width, height, 1);
    Texture2D colorTarget = new Texture2D(width, height, Format.RGBA8);
    fb.setDepthBuffer(Format.Depth);
    fb.setColorTexture(colorTarget);

    // Scene rendered into the framebuffer: a single red sphere.
    Geometry sphere = new Geometry("sphere", new Sphere(20, 20, 1));
    sphere.setMaterial(assetManager.loadMaterial("Common/Materials/RedColor.j3m"));
    fbNode.attachChild(sphere);

    // Main scene: a picture showing the framebuffer's color texture.
    Picture picture = new Picture("Picture");
    picture.setPosition(0, 0);
    picture.setWidth(width);
    picture.setHeight(height);
    picture.setTexture(assetManager, colorTarget, false);
    rootNode.attachChild(picture);
}
示例4: setupOffscreenView
import com.jme3.texture.FrameBuffer; //导入方法依赖的package包/类
/**
 * Builds a 512x512 offscreen pre-view that renders a logo-textured box into
 * a texture, and returns that texture for use in the main scene.
 *
 * @return the texture the offscreen view renders into
 */
public Texture setupOffscreenView(){
    final int side = 512;

    // Camera for the offscreen pass.
    Camera offscreenCam = new Camera(side, side);
    offscreenCam.setFrustumPerspective(45f, 1f, 1f, 1000f);
    offscreenCam.setLocation(new Vector3f(0f, 0f, -5f));
    offscreenCam.lookAt(new Vector3f(0f, 0f, 0f), Vector3f.UNIT_Y);

    // Pre-view: rendered before the main viewport each frame.
    offView = renderManager.createPreView("Offscreen View", offscreenCam);
    offView.setClearFlags(true, true, true);
    offView.setBackgroundColor(ColorRGBA.DarkGray);

    // Texture that receives the color output of the offscreen pass.
    Texture2D target = new Texture2D(side, side, Format.RGBA8);
    target.setMinFilter(Texture.MinFilter.Trilinear);
    target.setMagFilter(Texture.MagFilter.Bilinear);

    // Framebuffer: depth renderbuffer plus the color texture above.
    FrameBuffer offscreenBuffer = new FrameBuffer(side, side, 1);
    offscreenBuffer.setDepthBuffer(Format.Depth);
    offscreenBuffer.setColorTexture(target);
    offView.setOutputFrameBuffer(offscreenBuffer);

    // Scene content: a box with the engine logo material.
    offBox = new Geometry("box", new Box(Vector3f.ZERO, 1, 1, 1));
    offBox.setMaterial(assetManager.loadMaterial("Interface/Logo/Logo.j3m"));
    offView.attachScene(offBox);

    return target;
}
示例5: createPreViews
import com.jme3.texture.FrameBuffer; //导入方法依赖的package包/类
/**
 * Creates the reflection and refraction pre-views used by the water effect.
 * Each pre-view renders into its own offscreen framebuffer (depth
 * renderbuffer + color texture) before the main viewport is drawn.
 *
 * <p>NOTE(review): both views attach {@code reflectionScene}; the refraction
 * view reusing the same scene (rather than a separate refraction scene)
 * looks intentional but is worth confirming.</p>
 */
protected void createPreViews() {
    reflectionCam = new Camera(renderWidth, renderHeight);
    refractionCam = new Camera(renderWidth, renderHeight);

    // --- Reflection pass ---
    // create a pre-view: a view that is rendered before the main view
    reflectionView = new ViewPort("Reflection View", reflectionCam);
    reflectionView.setClearFlags(true, true, true);
    reflectionView.setBackgroundColor(ColorRGBA.Black);
    // create offscreen framebuffer: depth renderbuffer + reflection texture
    reflectionBuffer = new FrameBuffer(renderWidth, renderHeight, 1);
    reflectionBuffer.setDepthBuffer(Format.Depth);
    reflectionBuffer.setColorTexture(reflectionTexture);
    // route the pre-view's output into the offscreen framebuffer
    reflectionView.setOutputFrameBuffer(reflectionBuffer);
    // processor clips the scene against the reflection plane while rendering
    reflectionView.addProcessor(new ReflectionProcessor(reflectionCam, reflectionBuffer, reflectionClipPlane));
    // attach the scene to the viewport to be rendered
    reflectionView.attachScene(reflectionScene);

    // --- Refraction pass ---
    // create a pre-view: a view that is rendered before the main view
    refractionView = new ViewPort("Refraction View", refractionCam);
    refractionView.setClearFlags(true, true, true);
    refractionView.setBackgroundColor(ColorRGBA.Black);
    // create offscreen framebuffer; unlike reflection, it also captures depth
    refractionBuffer = new FrameBuffer(renderWidth, renderHeight, 1);
    refractionBuffer.setDepthBuffer(Format.Depth);
    refractionBuffer.setColorTexture(refractionTexture);
    refractionBuffer.setDepthTexture(depthTexture);
    // route the pre-view's output into the offscreen framebuffer
    refractionView.setOutputFrameBuffer(refractionBuffer);
    refractionView.addProcessor(new RefractionProcessor());
    // attach the scene to the viewport to be rendered
    refractionView.attachScene(reflectionScene);
}
示例6: SubScreenBridge
import com.jme3.texture.FrameBuffer; //导入方法依赖的package包/类
/**
 * Bridges a sub screen to an offscreen texture: {@code root} is rendered by
 * a dedicated pre-view into an RGBA8 texture of the given size.
 */
public SubScreenBridge(RenderManager rm, int width, int height, Node root) {
    this.rm = rm;
    this.root = root;

    cam = new Camera(width, height);
    cam.setParallelProjection(true);
    cam.setFrustumPerspective(45, 90, 0, 1);

    // Android clears only the color buffer and gets no depth attachment.
    boolean android = ToolKit.isAndroid();
    vp = rm.createPreView("Offscreen View", cam);
    if (android) {
        vp.setClearFlags(true, false, false);
    } else {
        vp.setClearFlags(true, true, true);
    }

    tex = new Texture2D(width, height, Image.Format.RGBA8);
    tex.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
    tex.setMagFilter(Texture.MagFilter.Bilinear);

    FrameBuffer target = new FrameBuffer(width, height, 1);
    if (!android) {
        target.setDepthBuffer(Image.Format.Depth);
    }
    target.setColorTexture(tex);
    vp.setOutputFrameBuffer(target);

    setSpatial(root);
    vp.attachScene(root);
}
示例7: setupOffBuffer
import com.jme3.texture.FrameBuffer; //导入方法依赖的package包/类
/**
 * Allocates the CPU-side image and byte buffer, creates an offscreen
 * framebuffer backed by renderbuffers (RGBA8 color + depth), points the
 * viewport at it, and resizes the camera to match.
 */
private void setupOffBuffer() {
    // CPU-side destination for frames read back from the GPU (4 bytes/pixel).
    image = new BufferedImage(width, height, BufferedImage.TYPE_4BYTE_ABGR);
    cpuBuf = BufferUtils.createByteBuffer(width * height * 4);

    // Color is stored in a renderbuffer rather than a texture; the texture
    // is never sampled, only read back to the CPU.
    offBuffer = new FrameBuffer(width, height, 0);
    offBuffer.setDepthBuffer(Format.Depth);
    offBuffer.setColorBuffer(Format.RGBA8);

    // Route the viewport's output into the offscreen framebuffer.
    viewPort.setOutputFrameBuffer(offBuffer);
    camera.resize(width, height, false);
}
示例8: fillMatrix
import com.jme3.texture.FrameBuffer; //导入方法依赖的package包/类
/**
 * Renders the given scene in a top-down manner into an offscreen RGBA32F
 * framebuffer and folds the red channel of every pixel into {@code matrix}.
 *
 * @param matrix target matrix, one cell per rendered pixel
 * @param scene  the scene to render
 * @param max    if true keep the per-cell maximum; otherwise sum values
 */
private void fillMatrix(Matrix matrix, Spatial scene, boolean max) {
    // init: parallel-projection camera rendering into a float framebuffer
    Camera cam = new Camera(size, size);
    cam.setParallelProjection(true);
    ViewPort view = new ViewPort("Off", cam);
    view.setClearFlags(true, true, true);
    FrameBuffer buffer = new FrameBuffer(size, size, 1);
    buffer.setDepthBuffer(Image.Format.Depth);
    buffer.setColorBuffer(Image.Format.RGBA32F);
    view.setOutputFrameBuffer(buffer);
    view.attachScene(scene);

    // render a single frame into the buffer
    scene.updateGeometricState();
    view.setEnabled(true);
    app.getRenderManager().renderViewPort(view, 0);

    // retrieve the frame: size*size pixels, 4 float channels each
    ByteBuffer data = BufferUtils.createByteBuffer(size * size * 4 * 4);
    app.getRenderer().readFrameBufferWithFormat(buffer, data, Image.Format.RGBA32F);
    data.rewind();
    for (int y = 0; y < size; ++y) {
        for (int x = 0; x < size; ++x) {
            // Red channel carries the value; combine with the existing cell.
            double v = data.getFloat();
            double old = matrix.get(x, y);
            matrix.set(x, y, max ? Math.max(v, old) : v + old);
            // Skip the unused G, B and A channels.
            data.getFloat();
            data.getFloat();
            data.getFloat();
        }
    }
}
示例9: simpleInitApp
import com.jme3.texture.FrameBuffer; //导入方法依赖的package包/类
/**
 * Builds a scene where the main viewport is post-processed (blue overlay)
 * into an offscreen framebuffer whose texture is then composed — via a
 * second post processor (red overlay + {@link ComposeFilter}) — into the
 * GUI viewport's back buffer.
 */
@Override
public void simpleInitApp() {
    this.flyCam.setMoveSpeed(10);
    cam.setLocation(new Vector3f(6.0344796f, 1.5054002f, 55.572033f));
    cam.setRotation(new Quaternion(0.0016069f, 0.9810479f, -0.008143323f, 0.19358753f));
    makeScene();

    // Main viewport post processor.
    FilterPostProcessor fpp = new FilterPostProcessor(assetManager);
    fpp.addFilter(new ColorOverlayFilter(ColorRGBA.Blue));
    viewPort.addProcessor(fpp);

    // Redirect the main viewport into an offscreen framebuffer so its
    // output is available as a texture.
    FrameBuffer mainVPFrameBuffer = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1);
    Texture2D mainVPTexture = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8);
    mainVPFrameBuffer.addColorTexture(mainVPTexture);
    mainVPFrameBuffer.setDepthBuffer(Image.Format.Depth);
    viewPort.setOutputFrameBuffer(mainVPFrameBuffer);

    // GUI viewport post processor: composes the main viewport texture with
    // the GUI back buffer. Note that you can switch the filter order so that
    // GUI filters are (or are not) applied to the main viewport texture.
    final FilterPostProcessor guifpp = new FilterPostProcessor(assetManager);
    guifpp.addFilter(new ColorOverlayFilter(ColorRGBA.Red));
    guifpp.addFilter(new ComposeFilter(mainVPTexture));
    guiViewPort.addProcessor(guifpp);

    // Compositing mixes textures by alpha, so the GUI viewport's clear color
    // must have an alpha value of 0.
    guiViewPort.setBackgroundColor(ColorRGBA.BlackNoAlpha);
    guiViewPort.setClearColor(true);
}
示例10: TransfertImage
import com.jme3.texture.FrameBuffer; //导入方法依赖的package包/类
/**
 * Prepares an offscreen BGRA8 framebuffer (with a depth attachment) of the
 * requested size, plus a CPU-side byte buffer able to hold one full frame.
 */
TransfertImage(int width, int height) {
    this.width = width;
    this.height = height;

    // CPU destination for one frame read back from the GPU.
    byteBuf = BufferUtils.createByteBuffer(width * height * BGRA_size);

    fb = new FrameBuffer(width, height, 1);
    fb.setColorBuffer(Format.BGRA8);
    fb.setDepthBuffer(Format.Depth);
}
示例11: SubScreenBridge
import com.jme3.texture.FrameBuffer; //导入方法依赖的package包/类
/**
 * Bridges an offscreen-rendered sub screen: {@code root} is drawn by a
 * dedicated pre-view into an RGBA8 texture of the given dimensions.
 */
public SubScreenBridge(RenderManager rm, int width, int height, Node root) {
    this.rm = rm;
    this.root = root;

    cam = new Camera(width, height);
    cam.setParallelProjection(true);
    cam.setFrustumPerspective(45, 90, 0, 1);

    // On Android only the color buffer is cleared and no depth buffer is attached.
    final boolean fullClear = !Screen.isAndroid();
    vp = rm.createPreView("Offscreen View", cam);
    vp.setClearFlags(true, fullClear, fullClear);

    tex = new Texture2D(width, height, Image.Format.RGBA8);
    tex.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
    tex.setMagFilter(Texture.MagFilter.Bilinear);

    FrameBuffer target = new FrameBuffer(width, height, 1);
    if (fullClear) {
        target.setDepthBuffer(Image.Format.Depth);
    }
    target.setColorTexture(tex);
    vp.setOutputFrameBuffer(target);

    setSpatial(root);
    vp.attachScene(root);
}
示例12: reshapeInThread
import com.jme3.texture.FrameBuffer; //导入方法依赖的package包/类
/**
 * Rebuilds the offscreen framebuffer and the CPU-side image after a resize;
 * intended to run on the render thread.
 *
 * @param width  new width in pixels
 * @param height new height in pixels
 */
private void reshapeInThread(int width, int height) {
    // Reuse the readback buffer when it is already large enough (4 bytes/pixel).
    byteBuf = BufferUtils.ensureLargeEnough(byteBuf, width * height * 4);
    intBuf = byteBuf.asIntBuffer();
    fb = new FrameBuffer(width, height, 1);
    fb.setDepthBuffer(Format.Depth);
    fb.setColorBuffer(Format.RGB8);
    if (attachAsMain){
        // Replace the window's main framebuffer with the offscreen one.
        rm.getRenderer().setMainFrameBufferOverride(fb);
    }
    // img is also read elsewhere (presumably the paint path) — swap under the lock.
    synchronized (lock){
        img = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
    }
    // synchronized (lock){
    // img = (BufferedImage) getGraphicsConfiguration().createCompatibleImage(width, height);
    // }
    // Vertical flip transform: scale Y by -1, then shift the image back into view.
    AffineTransform tx = AffineTransform.getScaleInstance(1, -1);
    tx.translate(0, -img.getHeight());
    transformOp = new AffineTransformOp(tx, AffineTransformOp.TYPE_NEAREST_NEIGHBOR);
    for (ViewPort vp : viewPorts){
        if (!attachAsMain){
            vp.setOutputFrameBuffer(fb);
        }
        vp.getCamera().resize(width, height, true);
        // NOTE: Hack alert. This is done ONLY for custom framebuffers.
        // Main framebuffer should use RenderManager.notifyReshape().
        for (SceneProcessor sp : vp.getProcessors()){
            sp.reshape(vp, width, height);
        }
    }
}
示例13: setupOffscreenView
import com.jme3.texture.FrameBuffer; //导入方法依赖的package包/类
/**
 * Creates the offscreen pre-view that renders a logo-textured box into a
 * renderbuffer-backed framebuffer; this object is registered as a scene
 * processor so it is notified when each frame has been rendered.
 */
public void setupOffscreenView(){
    // Camera for the offscreen pass.
    offCamera = new Camera(width, height);
    offCamera.setFrustumPerspective(45f, 1f, 1f, 1000f);
    offCamera.setLocation(new Vector3f(0f, 0f, -5f));
    offCamera.lookAt(new Vector3f(0f, 0f, 0f), Vector3f.UNIT_Y);

    // Pre-view: drawn before the main viewport each frame.
    offView = renderManager.createPreView("Offscreen View", offCamera);
    offView.setBackgroundColor(ColorRGBA.DarkGray);
    offView.setClearFlags(true, true, true);
    // Registering this processor lets us know when the scene has been
    // rendered to the frame buffer.
    offView.addProcessor(this);

    // Renderbuffer attachments (no texture): faster for GPU -> CPU copies.
    offBuffer = new FrameBuffer(width, height, 1);
    offBuffer.setDepthBuffer(Format.Depth);
    offBuffer.setColorBuffer(Format.RGBA8);
    offView.setOutputFrameBuffer(offBuffer);

    // Scene content: a unit box with the engine logo material.
    offBox = new Geometry("box", new Box(Vector3f.ZERO, 1, 1, 1));
    offBox.setMaterial(assetManager.loadMaterial("Interface/Logo/Logo.j3m"));
    offView.attachScene(offBox);
}
示例14: OSRBridge
import com.jme3.texture.FrameBuffer; //导入方法依赖的package包/类
/**
 * Offscreen-render bridge: renders {@code root} through a pre-view into an
 * RGBA8 texture and sets up a chase camera orbiting the rendered node.
 *
 * @param rm     render manager used to create the pre-view
 * @param width  render target width in pixels
 * @param height render target height in pixels
 * @param root   scene to render offscreen
 */
public OSRBridge(RenderManager rm, int width, int height, Node root) {
    this.rm = rm;
    this.root = root;
    cam = new Camera(width, height);
    vp = rm.createPreView("Offscreen View", cam);
    // On Android only the color buffer is cleared; depth/stencil clears
    // (and the depth attachment below) are skipped.
    if (!ToolKit.isAndroid())
        vp.setClearFlags(true, true, true);
    else
        vp.setClearFlags(true, false, false);
    FrameBuffer offBuffer = new FrameBuffer(width, height, 1);
    tex = new Texture2D(width, height, Image.Format.RGBA8);
    tex.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
    tex.setMagFilter(Texture.MagFilter.Bilinear);
    if (!ToolKit.isAndroid())
        offBuffer.setDepthBuffer(Image.Format.Depth);
    offBuffer.setColorTexture(tex);
    vp.setOutputFrameBuffer(offBuffer);
    setSpatial(root);
    vp.attachScene(root);
    // Anonymous subclass: setDragToRotate deliberately skips super() and only
    // flips the two flags — presumably to avoid the input/cursor side effects
    // of the stock implementation; NOTE(review): confirm against ChaseCamera.
    chaseCam = new ChaseCamera(cam, root) {
        @Override
        public void setDragToRotate(boolean dragToRotate) {
            this.dragToRotate = dragToRotate;
            this.canRotate = !dragToRotate;
        }
    };
    chaseCam.setDefaultDistance(5f);
    chaseCam.setMaxDistance(340f);
    chaseCam.setDefaultHorizontalRotation(90 * FastMath.DEG_TO_RAD);
    chaseCam.setDefaultVerticalRotation(0f);
    cam.setFrustumFar(36000f);
    // Perspective frustum matching the offscreen target's aspect ratio.
    float aspect = (float) cam.getWidth() / (float) cam.getHeight();
    cam.setFrustumPerspective(45f, aspect, 0.1f, cam.getFrustumFar());
    chaseCam.setUpVector(Vector3f.UNIT_Y);
}
示例15: fillSlopeMatrix
import com.jme3.texture.FrameBuffer; //导入方法依赖的package包/类
/**
 * Renders the given scene in a top-down manner into an offscreen RGBA32F
 * framebuffer and accumulates the decoded per-pixel values into the
 * {@code gradX}, {@code gradY}, {@code gradH} and {@code alpha} matrices.
 *
 * @param scene the scene to render
 */
private void fillSlopeMatrix(Spatial scene) {
    // init: parallel-projection camera rendering into a float framebuffer
    Camera cam = new Camera(size, size);
    cam.setParallelProjection(true);
    ViewPort view = new ViewPort("Off", cam);
    view.setClearFlags(true, true, true);
    // Mid-gray, zero-alpha clear: uncovered pixels decode to zero
    // gradient/height below (0.5 maps to 0 after the remap).
    view.setBackgroundColor(new ColorRGBA(0.5f, 0.5f, 0.5f, 0f));
    FrameBuffer buffer = new FrameBuffer(size, size, 1);
    buffer.setDepthBuffer(Image.Format.Depth);
    buffer.setColorBuffer(Image.Format.RGBA32F);
    view.setOutputFrameBuffer(buffer);
    view.attachScene(scene);
    // render a single frame
    scene.updateGeometricState();
    view.setEnabled(true);
    app.getRenderManager().renderViewPort(view, 0);
    // retrieve data: size*size pixels, 4 float channels each
    ByteBuffer data = BufferUtils.createByteBuffer(size*size*4*4);
    app.getRenderer().readFrameBufferWithFormat(buffer, data, Image.Format.RGBA32F);
    data.rewind();
    for (int y=0; y<size; ++y) {
        for (int x=0; x<size; ++x) {
            // R and G channels: values in [0,1] remapped to [-1,1];
            // presumably gradient components — confirm against the shader.
            // double gx = (((data.get() & 0xff) / 256.0) - 0.5) * 2;
            // double gy = (((data.get() & 0xff) / 256.0) - 0.5) * 2;
            double gx = (data.getFloat() - 0.5) * 2;
            double gy = (data.getFloat() - 0.5) * 2;
            // Normalize to a unit direction; a zero gradient stays zero
            // (s is forced to 1 to avoid dividing by zero).
            double s = Math.sqrt(gx*gx + gy*gy);
            if (s==0) {
                gx=0; gy=0; s=1;
            }
            gradX.set(x, y, (gx / s) + gradX.get(x, y));
            gradY.set(x, y, (gy / s) + gradY.get(x, y));
            // B channel: offset around 0.5; values within +-0.002 are zeroed
            // (noise floor — TODO confirm threshold choice).
            // double v = (((data.get() & 0xff) / 255.0) - 0.5);
            double v = (data.getFloat() - 0.5);
            if (Math.abs(v)<0.002) {
                v=0;
            }
            gradH.set(x, y, v*2 + gradH.get(x, y));
            // A channel: stored as-is (overwritten, not accumulated).
            // data.get();
            double a = data.getFloat();
            alpha.set(x, y, a);
        }
    }
}