This article collects typical usage examples of com.jme3.texture.Image.Format.RGBA8 in Java. If you have been wondering what Format.RGBA8 is for, how to use it, or where to find usage examples, the curated samples below should help. You can also read more about the enclosing class, com.jme3.texture.Image.Format.
The following 15 code examples demonstrate the Format.RGBA8 attribute, ordered by popularity by default.
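Most of the examples below follow the same basic pattern: an RGBA8 texture is created either as an off-screen render target or as the output format of generated image data. Here is a minimal, hedged sketch of that recurring pattern; the 512x512 size and the jME3 application context are assumptions, not part of any specific example:

// Minimal sketch: an RGBA8 color texture attached to an off-screen framebuffer.
// The size is arbitrary; a depth buffer is added so depth testing works off screen.
Texture2D colorTex = new Texture2D(512, 512, Format.RGBA8);
FrameBuffer fb = new FrameBuffer(512, 512, 1);
fb.setDepthBuffer(Format.Depth);
fb.setColorTexture(colorTex);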
Example 1: readData
@Override
public void readData(Structure tex, BlenderContext blenderContext) {
super.readData(tex, blenderContext);
voronoiWeights = new float[4];
voronoiWeights[0] = ((Number) tex.getFieldValue("vn_w1")).floatValue();
voronoiWeights[1] = ((Number) tex.getFieldValue("vn_w2")).floatValue();
voronoiWeights[2] = ((Number) tex.getFieldValue("vn_w3")).floatValue();
voronoiWeights[3] = ((Number) tex.getFieldValue("vn_w4")).floatValue();
noisesize = ((Number) tex.getFieldValue("noisesize")).floatValue();
outscale = ((Number) tex.getFieldValue("ns_outscale")).floatValue();
mexp = ((Number) tex.getFieldValue("vn_mexp")).floatValue();
distanceType = ((Number) tex.getFieldValue("vn_distm")).intValue();
voronoiColorType = ((Number) tex.getFieldValue("vn_coltype")).intValue();
hashPoint = voronoiColorType != 0 ? new float[3] : null;
weightSum = voronoiWeights[0] + voronoiWeights[1] + voronoiWeights[2] + voronoiWeights[3];
if (weightSum != 0.0f) {
weightSum = outscale / weightSum;
}
if (voronoiColorType != 0 || colorBand != null) {
this.imageFormat = Format.RGBA8;
}
}
Example 2: simpleInitApp
@Override
public void simpleInitApp() {
int w = settings.getWidth();
int h = settings.getHeight();
//setup framebuffer
fb = new FrameBuffer(w, h, 1);
Texture2D fbTex = new Texture2D(w, h, Format.RGBA8);
fb.setDepthBuffer(Format.Depth);
fb.setColorTexture(fbTex);
// setup framebuffer's scene
Sphere sphMesh = new Sphere(20, 20, 1);
Material solidColor = assetManager.loadMaterial("Common/Materials/RedColor.j3m");
Geometry sphere = new Geometry("sphere", sphMesh);
sphere.setMaterial(solidColor);
fbNode.attachChild(sphere);
//setup main scene
Picture p = new Picture("Picture");
p.setPosition(0, 0);
p.setWidth(w);
p.setHeight(h);
p.setTexture(assetManager, fbTex, false);
rootNode.attachChild(p);
}
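Note that fb and fbNode are fields of the test class, and this snippet never shows the framebuffer actually being rendered into. A hedged sketch of the missing wiring, mirroring Example 3 below (the viewport name and clear flags are assumptions):

// Hedged sketch: render fbNode into fb through an off-screen pre-view.
// A scene attached this way must also be updated manually each frame
// (updateLogicalState / updateGeometricState in simpleUpdate).
Camera offCamera = new Camera(w, h);
ViewPort offView = renderManager.createPreView("Offscreen View", offCamera);
offView.setClearFlags(true, true, true);
offView.setOutputFrameBuffer(fb);
offView.attachScene(fbNode);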
Example 3: setupOffscreenView
public Texture setupOffscreenView(){
Camera offCamera = new Camera(512, 512);
offView = renderManager.createPreView("Offscreen View", offCamera);
offView.setClearFlags(true, true, true);
offView.setBackgroundColor(ColorRGBA.DarkGray);
// create offscreen framebuffer
FrameBuffer offBuffer = new FrameBuffer(512, 512, 1);
//setup framebuffer's cam
offCamera.setFrustumPerspective(45f, 1f, 1f, 1000f);
offCamera.setLocation(new Vector3f(0f, 0f, -5f));
offCamera.lookAt(new Vector3f(0f, 0f, 0f), Vector3f.UNIT_Y);
//setup framebuffer's texture
Texture2D offTex = new Texture2D(512, 512, Format.RGBA8);
offTex.setMinFilter(Texture.MinFilter.Trilinear);
offTex.setMagFilter(Texture.MagFilter.Bilinear);
//setup framebuffer to use texture
offBuffer.setDepthBuffer(Format.Depth);
offBuffer.setColorTexture(offTex);
//set viewport to render to offscreen framebuffer
offView.setOutputFrameBuffer(offBuffer);
// setup framebuffer's scene
Box boxMesh = new Box(Vector3f.ZERO, 1,1,1);
Material material = assetManager.loadMaterial("Interface/Logo/Logo.j3m");
offBox = new Geometry("box", boxMesh);
offBox.setMaterial(material);
// attach the scene to the viewport to be rendered
offView.attachScene(offBox);
return offTex;
}
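The returned texture can then be used like any other texture, for instance on an unshaded material, much as Example 4 does with its own render-to-texture result. A short, hedged usage sketch (the display geometry is an assumption):

// Hedged usage sketch: show the off-screen result on an ordinary material.
Texture offTex = setupOffscreenView();
Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
mat.setTexture("ColorMap", offTex);
Geometry display = new Geometry("display", new Box(1, 1, 1));
display.setMaterial(mat);
rootNode.attachChild(display);

Because offBox is attached directly to the off-screen viewport rather than to the root node, it also has to be updated manually (updateLogicalState/updateGeometricState) in the update loop.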
Example 4: simpleInitApp
public void simpleInitApp() {
ViewPort niftyView = renderManager.createPreView("NiftyView", new Camera(1024, 768));
niftyView.setClearFlags(true, true, true);
NiftyJmeDisplay niftyDisplay = new NiftyJmeDisplay(assetManager,
inputManager,
audioRenderer,
niftyView);
nifty = niftyDisplay.getNifty();
nifty.fromXml("all/intro.xml", "start");
niftyView.addProcessor(niftyDisplay);
Texture2D depthTex = new Texture2D(1024, 768, Format.Depth);
FrameBuffer fb = new FrameBuffer(1024, 768, 1);
fb.setDepthTexture(depthTex);
Texture2D tex = new Texture2D(1024, 768, Format.RGBA8);
tex.setMinFilter(MinFilter.Trilinear);
tex.setMagFilter(MagFilter.Bilinear);
fb.setColorTexture(tex);
niftyView.setClearFlags(true, true, true);
niftyView.setOutputFrameBuffer(fb);
Box b = new Box(Vector3f.ZERO, 1, 1, 1);
Geometry geom = new Geometry("Box", b);
Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
mat.setTexture("ColorMap", tex);
geom.setMaterial(mat);
rootNode.attachChild(geom);
}
Example 5: createTextures
protected void createTextures() {
reflectionTexture = new Texture2D(renderWidth, renderHeight, Format.RGBA8);
// The MinFilter.Trilinear setting appears to be what makes the water effect extremely slow on a Samsung tablet (Android 5.0.2, Galaxy Note 10.1, GT-N8010).
// On that device it costs roughly 20-30 FPS (dropping from about 50 FPS to about 20 FPS).
// (The same setting causes no problem on a Huawei G6-U00 running Android 4.3.)
// reflectionTexture.setMinFilter(Texture.MinFilter.Trilinear);
// reflectionTexture.setMagFilter(Texture.MagFilter.Bilinear);
}
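The commented-out filter calls are the interesting part here: trilinear minification was observed to be prohibitively slow on one mobile GPU. A hedged sketch of how the filters might be chosen conditionally; the runningOnMobile flag is an assumption, the original class has no such field:

// Hypothetical helper: pick cheaper filtering on devices where trilinear is too slow.
private void configureReflectionFilters(boolean runningOnMobile) {
    if (runningOnMobile) {
        // Cheap filtering: avoids the slowdown observed on the Samsung tablet.
        reflectionTexture.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
        reflectionTexture.setMagFilter(Texture.MagFilter.Bilinear);
    } else {
        // Desktop GPUs handle trilinear minification without trouble.
        reflectionTexture.setMinFilter(Texture.MinFilter.Trilinear);
        reflectionTexture.setMagFilter(Texture.MagFilter.Bilinear);
    }
}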
Example 6: readData
@Override
public void readData(Structure tex, BlenderContext blenderContext) {
super.readData(tex, blenderContext);
noisesize = ((Number) tex.getFieldValue("noisesize")).floatValue();
noiseDepth = ((Number) tex.getFieldValue("noisedepth")).intValue();
noiseBasis = ((Number) tex.getFieldValue("noisebasis")).intValue();
noiseType = ((Number) tex.getFieldValue("noisetype")).intValue();
isHard = noiseType != TEX_NOISESOFT;
sType = ((Number) tex.getFieldValue("stype")).intValue();
if (sType == TEX_COLOR) {
this.imageFormat = Format.RGBA8;
}
}
Example 7: readData
public void readData(Structure tex, BlenderContext blenderContext) {
flag = ((Number) tex.getFieldValue("flag")).intValue();
colorBand = new ColorBand(tex, blenderContext).computeValues();
bacd = new BrightnessAndContrastData(tex);
if (colorBand != null) {
imageFormat = Format.RGBA8;
}
}
Example 8: reshape
public void reshape(ViewPort vp, int w, int h) {
diffuseData = new Texture2D(w, h, Format.RGBA8);
normalData = new Texture2D(w, h, Format.RGBA8);
specularData = new Texture2D(w, h, Format.RGBA8);
depthData = new Texture2D(w, h, Format.Depth);
mat = new Material(assetManager, "Common/MatDefs/Light/Deferred.j3md");
mat.setTexture("DiffuseData", diffuseData);
mat.setTexture("SpecularData", specularData);
mat.setTexture("NormalData", normalData);
mat.setTexture("DepthData", depthData);
display.setMaterial(mat);
display.setPosition(0, 0);
display.setWidth(w);
display.setHeight(h);
display1.setTexture(assetManager, diffuseData, false);
display2.setTexture(assetManager, normalData, false);
display3.setTexture(assetManager, specularData, false);
display4.setTexture(assetManager, depthData, false);
display1.setPosition(0, 0);
display2.setPosition(w/2, 0);
display3.setPosition(0, h/2);
display4.setPosition(w/2, h/2);
display1.setWidth(w/2);
display1.setHeight(h/2);
display2.setWidth(w/2);
display2.setHeight(h/2);
display3.setWidth(w/2);
display3.setHeight(h/2);
display4.setWidth(w/2);
display4.setHeight(h/2);
guiNode.updateGeometricState();
fb = new FrameBuffer(w, h, 1);
fb.setDepthTexture(depthData);
fb.addColorTexture(diffuseData);
fb.addColorTexture(normalData);
fb.addColorTexture(specularData);
fb.setMultiTarget(true);
/*
 * Pass 1: mark pixels in front of the far light boundary.
 * - Render back faces of the light volume.
 * - Depth test GREATER-EQUAL.
 * - Write to stencil on depth pass.
 * - Skipped for very small distant lights.
 */
/*
 * Pass 2: find the amount of lit pixels inside the volume.
 * - Start pixel query.
 * - Render front faces of the light volume.
 * - Depth test LESS-EQUAL.
 * - Don't write anything, only the EQUAL stencil test.
 */
/*
 * Pass 3: enable conditional rendering.
 * - Based on the query results from the previous stage.
 * - The GPU skips rendering for invisible lights.
 */
/*
 * Pass 4: render front faces of the light volume.
 * - Depth test LESS-EQUAL.
 * - Stencil test EQUAL.
 * - Runs only on the marked pixels inside the light.
 */
}
Example 9: createTextures
protected void createTextures() {
reflectionTexture = new Texture2D(renderWidth, renderHeight, Format.RGBA8);
refractionTexture = new Texture2D(renderWidth, renderHeight, Format.RGBA8);
depthTexture = new Texture2D(renderWidth, renderHeight, Format.Depth);
}
Example 10: PssmShadowRenderer
/**
* Create a PSSM Shadow Renderer
* More info on the technique at http://http.developer.nvidia.com/GPUGems3/gpugems3_ch10.html
* @param manager the application asset manager
* @param size the size of the rendered shadow maps (512, 1024, 2048, etc.)
* @param nbSplits the number of shadow maps rendered (more shadow maps give better quality but lower FPS).
*/
public PssmShadowRenderer(AssetManager manager, int size, int nbSplits) {
assetManager = manager;
nbSplits = Math.max(Math.min(nbSplits, 4), 1);
this.nbSplits = nbSplits;
shadowFB = new FrameBuffer[nbSplits];
shadowMaps = new Texture2D[nbSplits];
dispPic = new Picture[nbSplits];
lightViewProjectionsMatrices = new Matrix4f[nbSplits];
splits = new ColorRGBA();
splitsArray = new float[nbSplits + 1];
//DO NOT COMMENT THIS OUT (it prevents the incomplete read buffer crash on OS X)
dummyTex = new Texture2D(size, size, Format.RGBA8);
preshadowMat = new Material(manager, "Common/MatDefs/Shadow/PreShadow.j3md");
postshadowMat = new Material(manager, "Common/MatDefs/Shadow/PostShadowPSSM.j3md");
for (int i = 0; i < nbSplits; i++) {
lightViewProjectionsMatrices[i] = new Matrix4f();
shadowFB[i] = new FrameBuffer(size, size, 1);
shadowMaps[i] = new Texture2D(size, size, Format.Depth);
shadowFB[i].setDepthTexture(shadowMaps[i]);
//DO NOT COMMENT THIS OUT (it prevents the incomplete read buffer crash on OS X)
shadowFB[i].setColorTexture(dummyTex);
postshadowMat.setTexture("ShadowMap" + i, shadowMaps[i]);
//quads for debugging purposes
dispPic[i] = new Picture("Picture" + i);
dispPic[i].setTexture(manager, shadowMaps[i], false);
}
setCompareMode(CompareMode.Hardware);
setFilterMode(FilterMode.Bilinear);
shadowCam = new Camera(size, size);
shadowCam.setParallelProjection(true);
for (int i = 0; i < points.length; i++) {
points[i] = new Vector3f();
}
}
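A hedged sketch of how this renderer is typically attached to an application; the shadow map size, split count, and light direction are assumptions taken from a typical SimpleApplication setup:

// Hedged usage sketch (assumes a SimpleApplication with assetManager and viewPort in scope).
PssmShadowRenderer pssm = new PssmShadowRenderer(assetManager, 1024, 3);
pssm.setDirection(new Vector3f(-0.5f, -1f, -0.3f).normalizeLocal()); // shadow-casting light direction
pssm.setShadowIntensity(0.6f);
viewPort.addProcessor(pssm);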
Example 11: VFrame
public VFrame(int width, int height){
super(width, height, Format.RGBA8);
getImage().setData(BufferUtils.createByteBuffer(width*height*4));
}
Example 12: setupOffscreenView
public Texture setupOffscreenView(){
Camera offCamera = new Camera(512, 512);
offView = renderManager.createPreView("Offscreen View", offCamera);
offView.setClearFlags(true, true, true);
offView.setBackgroundColor(ColorRGBA.DarkGray);
// create offscreen framebuffer
FrameBuffer offBuffer = new FrameBuffer(512, 512, 1);
//setup framebuffer's cam
offCamera.setFrustumPerspective(45f, 1f, 1f, 1000f);
offCamera.setLocation(new Vector3f(0f, 0f, -5f));
offCamera.lookAt(new Vector3f(0f, 0f, 0f), Vector3f.UNIT_Y);
//setup framebuffer's texture
TextureCubeMap offTex = new TextureCubeMap(512, 512, Format.RGBA8);
offTex.setMinFilter(Texture.MinFilter.Trilinear);
offTex.setMagFilter(Texture.MagFilter.Bilinear);
//setup framebuffer to use texture
offBuffer.setDepthBuffer(Format.Depth);
offBuffer.setMultiTarget(true);
offBuffer.addColorTexture(offTex, TextureCubeMap.Face.NegativeX);
offBuffer.addColorTexture(offTex, TextureCubeMap.Face.PositiveX);
offBuffer.addColorTexture(offTex, TextureCubeMap.Face.NegativeY);
offBuffer.addColorTexture(offTex, TextureCubeMap.Face.PositiveY);
offBuffer.addColorTexture(offTex, TextureCubeMap.Face.NegativeZ);
offBuffer.addColorTexture(offTex, TextureCubeMap.Face.PositiveZ);
//set viewport to render to offscreen framebuffer
offView.setOutputFrameBuffer(offBuffer);
// setup framebuffer's scene
Box boxMesh = new Box( 1,1,1);
Material material = assetManager.loadMaterial("Interface/Logo/Logo.j3m");
offBox = new Geometry("box", boxMesh);
offBox.setMaterial(material);
// attach the scene to the viewport to be rendered
offView.attachScene(offBox);
return offTex;
}
Example 13: PssmShadowRenderer
/**
* Create a PSSM Shadow Renderer
* More info on the technique at <a href="http://http.developer.nvidia.com/GPUGems3/gpugems3_ch10.html">http://http.developer.nvidia.com/GPUGems3/gpugems3_ch10.html</a>
* @param manager the application asset manager
* @param size the size of the rendered shadow maps (512, 1024, 2048, etc.)
* @param nbSplits the number of shadow maps rendered (more shadow maps give better quality but lower FPS).
*/
public PssmShadowRenderer(AssetManager manager, int size, int nbSplits) {
assetManager = manager;
nbSplits = Math.max(Math.min(nbSplits, 4), 1);
this.nbSplits = nbSplits;
shadowFB = new FrameBuffer[nbSplits];
shadowMaps = new Texture2D[nbSplits];
dispPic = new Picture[nbSplits];
lightViewProjectionsMatrices = new Matrix4f[nbSplits];
splits = new ColorRGBA();
splitsArray = new float[nbSplits + 1];
//DO NOT COMMENT THIS OUT (it prevents the incomplete read buffer crash on OS X)
dummyTex = new Texture2D(size, size, Format.RGBA8);
preshadowMat = new Material(manager, "Common/MatDefs/Shadow/PreShadow.j3md");
postshadowMat = new Material(manager, "Common/MatDefs/Shadow/PostShadowPSSM.j3md");
for (int i = 0; i < nbSplits; i++) {
lightViewProjectionsMatrices[i] = new Matrix4f();
shadowFB[i] = new FrameBuffer(size, size, 1);
shadowMaps[i] = new Texture2D(size, size, Format.Depth);
shadowFB[i].setDepthTexture(shadowMaps[i]);
//DO NOT COMMENT THIS OUT (it prevents the incomplete read buffer crash on OS X)
shadowFB[i].setColorTexture(dummyTex);
postshadowMat.setTexture("ShadowMap" + i, shadowMaps[i]);
//quads for debugging purposes
dispPic[i] = new Picture("Picture" + i);
dispPic[i].setTexture(manager, shadowMaps[i], false);
}
setCompareMode(CompareMode.Hardware);
setFilterMode(FilterMode.Bilinear);
setShadowIntensity(0.7f);
shadowCam = new Camera(size, size);
shadowCam.setParallelProjection(true);
for (int i = 0; i < points.length; i++) {
points[i] = new Vector3f();
}
}
Example 14: load
public Object load(AssetInfo info) throws IOException {
InputStream in = null;
Bitmap bitmap = null;
try {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
in = info.openStream();
BitmapFactory.decodeStream(in,null, options);
float scaleW = (float) options.outWidth / 256f;
float scaleH = (float) options.outHeight / 256f;
float scale = 1; //Math.max(scaleW,scaleH);
in.close();
in = null;
options = new BitmapFactory.Options();
options.inJustDecodeBounds=false;
options.inPurgeable = false;
options.inSampleSize = (int)FastMath.ceil(scale);
in = info.openStream();
bitmap = BitmapFactory.decodeStream(in, null, options);
if (bitmap == null) {
throw new IOException("Failed to load image: " + info.getKey().getName());
}
} finally {
if (in != null) {
in.close();
}
}
int width = bitmap.getWidth();
int height = bitmap.getHeight();
Format fmt;
switch (bitmap.getConfig()) {
case ALPHA_8:
fmt = Format.Alpha8;
break;
case ARGB_4444:
fmt = Format.ARGB4444;
break;
case ARGB_8888:
fmt = Format.RGBA8;
break;
case RGB_565:
fmt = Format.RGB565;
break;
default:
// return null;
throw new IOException("Failed to load image: " + info.getKey().getName());
}
if (((TextureKey) info.getKey()).isFlipY()) {
Bitmap newBitmap = null;
Matrix flipMat = new Matrix();
flipMat.preScale(1.0f, -1.0f);
newBitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), flipMat, false);
bitmap.recycle();
bitmap = newBitmap;
if (bitmap == null) {
throw new IOException("Failed to flip image: " + info.getKey().getName());
}
}
Image image = new Image(fmt, width, height, null);
image.setEfficentData(bitmap);
return image;
}
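A loader like this only takes effect once it has been registered with the asset manager for the image extensions it should handle. A hedged registration sketch; AndroidImageLoader is an assumed name, since the snippet does not show the class declaration:

// Hypothetical registration; the loader class name and extension list are assumptions.
assetManager.registerLoader(AndroidImageLoader.class, "jpg", "jpeg", "png", "bmp");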
Example 15: blend
@Override
public Image blend(Image image, Image baseImage, BlenderContext blenderContext) {
this.prepareImagesForBlending(image, baseImage);
float[] pixelColor = new float[] { color[0], color[1], color[2], 1.0f };
Format format = image.getFormat();
PixelInputOutput basePixelIO = null, pixelReader = PixelIOFactory.getPixelIO(format);
TexturePixel basePixel = null, pixel = new TexturePixel();
float[] materialColor = this.materialColor;
if (baseImage != null) {
basePixelIO = PixelIOFactory.getPixelIO(baseImage.getFormat());
materialColor = new float[this.materialColor.length];
basePixel = new TexturePixel();
}
int width = image.getWidth();
int height = image.getHeight();
int depth = image.getDepth();
if (depth == 0) {
depth = 1;
}
ArrayList<ByteBuffer> dataArray = new ArrayList<ByteBuffer>(depth);
float[] resultPixel = new float[4];
for (int dataLayerIndex = 0; dataLayerIndex < depth; ++dataLayerIndex) {
ByteBuffer data = image.getData(dataLayerIndex);
data.rewind();
ByteBuffer newData = BufferUtils.createByteBuffer(width * height * 4);
int dataIndex = 0, x = 0, y = 0, index = 0;
while (index < data.limit()) {
// getting the proper material color if the base texture is applied
if (basePixelIO != null) {
basePixelIO.read(baseImage, dataLayerIndex, basePixel, x, y);
basePixel.toRGBA(materialColor);
}
// reading the current texture's pixel
pixelReader.read(image, dataLayerIndex, pixel, index);
index += image.getFormat().getBitsPerPixel() >> 3;
pixel.toRGBA(pixelColor);
if (negateTexture) {
pixel.negate();
}
this.blendPixel(resultPixel, materialColor, pixelColor, blenderContext);
newData.put(dataIndex++, (byte) (resultPixel[0] * 255.0f));
newData.put(dataIndex++, (byte) (resultPixel[1] * 255.0f));
newData.put(dataIndex++, (byte) (resultPixel[2] * 255.0f));
newData.put(dataIndex++, (byte) (pixelColor[3] * 255.0f));
++x;
if (x >= width) {
x = 0;
++y;
}
}
dataArray.add(newData);
}
Image result = depth > 1 ? new Image(Format.RGBA8, width, height, depth, dataArray) : new Image(Format.RGBA8, width, height, dataArray.get(0));
if (image.getMipMapSizes() != null) {
result.setMipMapSizes(image.getMipMapSizes().clone());
}
return result;
}