This article collects typical usage examples of the C++ method TexturePtr::getBuffer. If you have been wondering what TexturePtr::getBuffer does, how to call it, or what real code that uses it looks like, the curated examples below should help. You can also explore further usage examples of the enclosing class, TexturePtr.
The following presents 15 code examples of TexturePtr::getBuffer, sorted by popularity by default.
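Before the examples, here is a minimal standalone sketch of the pattern most of them share: create a manual texture, lock the HardwarePixelBufferSharedPtr returned by getBuffer(), write pixels through the PixelBox obtained from getCurrentLock(), then unlock. It assumes Ogre 1.x with an initialized TextureManager; the texture name, size, and fill colour are placeholders, not taken from any example below.
// Minimal sketch (Ogre 1.x assumed): fill a manually created texture via getBuffer().
using namespace Ogre;
TexturePtr tex = TextureManager::getSingleton().createManual(
    "SketchTexture",                                    // placeholder name
    ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME,
    TEX_TYPE_2D, 256, 256, 0, PF_BYTE_BGRA, TU_DEFAULT);
HardwarePixelBufferSharedPtr buffer = tex->getBuffer(); // face 0, mip level 0
buffer->lock(HardwareBuffer::HBL_DISCARD);              // everything is overwritten, so discard
const PixelBox& box = buffer->getCurrentLock();         // CPU-visible view of the pixel data
uint8* dest = static_cast<uint8*>(box.data);
for (size_t y = 0; y < box.getHeight(); ++y)
{
    for (size_t x = 0; x < box.getWidth(); ++x)
    {
        size_t i = (y * box.rowPitch + x) * 4;          // rowPitch is in pixels; BGRA is 4 bytes
        dest[i + 0] = 255;  // B
        dest[i + 1] = 128;  // G
        dest[i + 2] = 0;    // R
        dest[i + 3] = 255;  // A
    }
}
buffer->unlock();                                       // release the lock and upload the data
The other recurring pattern below is calling getBuffer()->getRenderTarget() on a texture created with TU_RENDERTARGET to obtain a RenderTexture and attach a viewport to it, as in examples 2, 4, 5, 6, 7, 8 and 14.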
Example 1: getNullShadowTexture
//---------------------------------------------------------------------
TexturePtr ShadowTextureManager::getNullShadowTexture(PixelFormat format)
{
for (ShadowTextureList::iterator t = mNullTextureList.begin(); t != mNullTextureList.end(); ++t)
{
const TexturePtr& tex = *t;
if (format == tex->getFormat())
{
// Ok, a match
return tex;
}
}
// not found, create a new one
// A 1x1 texture of the correct format, not a render target
static const String baseName = "Ogre/ShadowTextureNull";
String targName = baseName + StringConverter::toString(mCount++);
TexturePtr shadowTex = TextureManager::getSingleton().createManual(
targName,
ResourceGroupManager::INTERNAL_RESOURCE_GROUP_NAME,
TEX_TYPE_2D, 1, 1, 0, format, TU_STATIC_WRITE_ONLY);
mNullTextureList.push_back(shadowTex);
// lock & populate the texture based on format
shadowTex->getBuffer()->lock(HardwareBuffer::HBL_DISCARD);
const PixelBox& box = shadowTex->getBuffer()->getCurrentLock();
// set high-values across all bytes of the format
PixelUtil::packColour( 1.0f, 1.0f, 1.0f, 1.0f, format, box.data );
shadowTex->getBuffer()->unlock();
return shadowTex;
}
Example 2: createRenderToTextures
void PlaneNodeProcessor::createRenderToTextures(Ogre::Entity* entity, Plane* plane, MaterialPtr material, XERCES_CPP_NAMESPACE::DOMElement* rttElem)
{
if(rttElem == NULL)
return;
Camera* cam = CoreSubsystem::getSingleton().getWorld()->getSceneManager()->createCamera("Cam" + entity->getName());
cam->setNearClipDistance(CoreSubsystem::getSingleton().getWorld()->getActiveCamera()->getNearClipDistance());
cam->setFarClipDistance(CoreSubsystem::getSingleton().getWorld()->getActiveCamera()->getFarClipDistance());
//cam->setFarClipDistance(1000000);
cam->setAspectRatio(CoreSubsystem::getSingleton().getWorld()->getActiveCamera()->getAspectRatio());
cam->setFOVy(CoreSubsystem::getSingleton().getWorld()->getActiveCamera()->getFOVy());
AliasTextureNamePairList aliases;
if(getAttributeValueAsBool(rttElem, "reflection"))
{
TexturePtr texture = Ogre::TextureManager::getSingleton().createManual( "Reflection" + entity->getName(),
ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME, TEX_TYPE_2D,
512, 512, 0, PF_R8G8B8, TU_RENDERTARGET );
RenderTexture* rttTex = texture->getBuffer()->getRenderTarget();
Viewport *v = rttTex->addViewport( cam );
v->setOverlaysEnabled(false);
rttTex->addListener(new PlaneReflectionTextureListener(entity, cam, plane));
aliases["reflection"] = "Reflection" + entity->getName();
cam->enableCustomNearClipPlane((MovablePlane*)plane);
}
if(getAttributeValueAsBool(rttElem, "refraction"))
{
TexturePtr texture = Ogre::TextureManager::getSingleton().createManual( "Refraction" + entity->getName(),
ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME, TEX_TYPE_2D,
512, 512, 0, PF_R8G8B8, TU_RENDERTARGET );
RenderTexture* rttTex = texture->getBuffer()->getRenderTarget();
Viewport *v = rttTex->addViewport( cam);
v->setOverlaysEnabled(false);
rttTex->addListener(new PlaneRefractionTextureListener(entity, cam));
aliases["refraction"] = "Refraction" + entity->getName();
plane->normal = Vector3::NEGATIVE_UNIT_Y;
cam->enableCustomNearClipPlane((MovablePlane*)plane);
}
if(!material->applyTextureAliases(aliases))
LOG_ERROR("PLANE", "Texture aliases could not be applied");
}
Example 3: generateRandomVelocityTexture
TexturePtr RandomTools::generateRandomVelocityTexture()
{
// PPP: Temp workaround for DX 11 which does not seem to like usage dynamic
// TextureUsage usage = (Root::getSingletonPtr()->getRenderSystem()->getName()=="Direct3D11 Rendering Subsystem") ?
// TU_DEFAULT : TU_DYNAMIC;
TexturePtr texPtr = TextureManager::getSingleton().createManual(
"RandomVelocityTexture",
// ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME,
"General",
TEX_TYPE_1D,
1024, 1, 1,
0,
PF_FLOAT32_RGBA);//,
//usage);
HardwarePixelBufferSharedPtr pixelBuf = texPtr->getBuffer();
// Lock the buffer so we can write to it.
pixelBuf->lock(HardwareBuffer::HBL_DISCARD);
const PixelBox &pb = pixelBuf->getCurrentLock();
float *randomData = static_cast<float*>(pb.data);
// float randomData[NUM_RAND_VALUES * 4];
for(int i = 0; i < NUM_RAND_VALUES * 4; i++)
{
randomData[i] = float( (rand() % 10000) - 5000 );
}
// PixelBox pixelBox(1024, 1, 1, PF_FLOAT32_RGBA, &randomData[0]);
// pixelBuf->blitFromMemory(pixelBox);
pixelBuf->unlock();
return texPtr;
}
Example 4: Dashboard::Dashboard
Dashboard::Dashboard() :
mDashCam(0)
, mDashboardListener(0)
, rttTex(0)
{
TexturePtr rttTexPtr = TextureManager::getSingleton().createManual("dashtexture", ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME, TEX_TYPE_2D, 1024, 512, 0, PF_R8G8B8, TU_RENDERTARGET, new ResourceBuffer());
rttTex = rttTexPtr->getBuffer()->getRenderTarget();
mDashCam = gEnv->sceneManager->createCamera("DashCam");
mDashCam->setNearClipDistance(1.0);
mDashCam->setFarClipDistance(10.0);
mDashCam->setPosition(Vector3(0.0, -10000.0, 0.0));
mDashCam->setAspectRatio(2.0);
Viewport *v = rttTex->addViewport(mDashCam);
v->setClearEveryFrame(true);
v->setBackgroundColour(ColourValue::Black);
//v->setOverlaysEnabled(false);
MaterialPtr mat = MaterialManager::getSingleton().getByName("renderdash");
mat->getTechnique(0)->getPass(0)->getTextureUnitState(0)->setTextureName("dashtexture");
mDashboardListener = new DashboardListener();
rttTex->addListener(mDashboardListener);
mDashboardListener->dashOverlay = OverlayManager::getSingleton().getByName("tracks/3D_DashboardOverlay");
mDashboardListener->needlesOverlay = OverlayManager::getSingleton().getByName("tracks/3D_NeedlesOverlay");
mDashboardListener->blendOverlay = OverlayManager::getSingleton().getByName("tracks/3D_BlendOverlay");
mDashboardListener->truckHUDOverlay = OverlayManager::getSingleton().getByName("tracks/TruckInfoBox");
// mDashboardListener->dbdebugOverlay = OverlayManager::getSingleton().getByName("Core/DebugOverlay");
// mDashboardListener->dbeditorOverlay = OverlayManager::getSingleton().getByName("tracks/EditorOverlay");
}
Example 5: init
bool SurveyMapTextureCreator::init()
{
TexturePtr texture = TextureManager::getSingleton().createManual(getTextureName(), ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME, TEX_TYPE_2D, 2048, 2048, 0 /* no mipmaps */, PF_R8G8B8, TU_RENDERTARGET, new ResourceBuffer());
if ( texture.isNull() ) return false;
mRttTex = texture->getBuffer()->getRenderTarget();
if ( !mRttTex ) return false;
mRttTex->setAutoUpdated(false);
mCamera = gEnv->sceneManager->createCamera(getCameraName());
mViewport = mRttTex->addViewport(mCamera);
mViewport->setBackgroundColour(ColourValue::Black);
mViewport->setOverlaysEnabled(false);
mViewport->setShadowsEnabled(false);
mViewport->setSkiesEnabled(false);
mMaterial = MaterialManager::getSingleton().create(getMaterialName(), ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME);
if ( mMaterial.isNull() ) return false;
mTextureUnitState = mMaterial->getTechnique(0)->getPass(0)->createTextureUnitState(getTextureName());
mRttTex->addListener(this);
mCamera->setFixedYawAxis(false);
mCamera->setProjectionType(PT_ORTHOGRAPHIC);
mCamera->setNearClipDistance(1.0f);
return true;
}
Example 6: create
void WaterRTT::create()
{
if (!mSceneMgr) return;
mCamera = mSceneMgr->createCamera("PlaneReflectionRefraction");
if (mViewerCamera)
{
mCamera->setFarClipDistance(mViewerCamera->getFarClipDistance());
mCamera->setNearClipDistance(mViewerCamera->getNearClipDistance());
mCamera->setAspectRatio(mViewerCamera->getAspectRatio());
}
for (unsigned int i = 0; i < 2; ++i)
{
if (i==0 && !mReflect) continue;
if (i==1 && !mRefract) continue;
TexturePtr tex = TextureManager::getSingleton().createManual(i == 0 ? "PlaneReflection" : "PlaneRefraction",
ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME, TEX_TYPE_2D, mRTTSize, mRTTSize, 0, PF_R8G8B8, TU_RENDERTARGET);
RenderTarget* rtt = tex->getBuffer()->getRenderTarget();
Viewport* vp = rtt->addViewport(mCamera);
vp->setOverlaysEnabled(false);
vp->setBackgroundColour(ColourValue(0.8f, 0.9f, 1.0f));
vp->setShadowsEnabled(false);
vp->setMaterialScheme ("reflection");
vp->setVisibilityMask( i == 0 ? RV_WaterReflect : RV_WaterRefract);
rtt->addListener(this);
if (i == 0) mReflectionTarget = rtt;
else mRefractionTarget = rtt;
}
sh::Factory::getInstance ().setTextureAlias ("WaterReflection", "PlaneReflection");
sh::Factory::getInstance ().setTextureAlias ("WaterRefraction", "PlaneRefraction");
}
Example 7: HeatHaze::HeatHaze
HeatHaze::HeatHaze(SceneManager *sceneMgr, RenderWindow *mWindow, Ogre::Camera *cam) : mSceneMgr(sceneMgr), rttTex(0), listener(0)
{
TexturePtr rttTexPtr = TextureManager::getSingleton().createManual("heathaze_rtt", ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME, TEX_TYPE_2D, cam->getViewport()->getActualWidth(), cam->getViewport()->getActualHeight(), 0, PF_R8G8B8, TU_RENDERTARGET, new ResourceBuffer()); // pixel dimensions, not relative viewport size
rttTex = rttTexPtr->getBuffer()->getRenderTarget();
{
/*
// we use the main camera now
mHazeCam = mSceneMgr->createCamera("Hazecam");
mHazeCam->setNearClipDistance(1.0);
mHazeCam->setFarClipDistance(1000.0);
mHazeCam->setPosition(Vector3(0, 0, 0));
*/
//mHazeCam->setAspectRatio(2.0);
// setup viewport
Viewport *v = rttTex->addViewport(cam);
//v->setClearEveryFrame(true);
//v->setBackgroundColour(ColourValue::Black);
v->setOverlaysEnabled(false);
// setup projected material
MaterialPtr mat = MaterialManager::getSingleton().getByName("tracks/HeatHazeMat");
tex = mat->getTechnique(0)->getPass(0)->getTextureUnitState(1);
tex->setTextureName("heathaze_rtt");
tex->setProjectiveTexturing(true, cam);
listener = new HeatHazeListener(mSceneMgr);
rttTex->addListener(listener);
rttTex->setAutoUpdated(false);
}
}
Example 8: CreateExternalWindow
NxScreen * NxScreenManager::CreateExternalWindow( int MonitorID, bool FullScreen, unsigned int Width, unsigned int Height )
{
//Viewport * mainviewport = NxEngine::getSingleton().GetNxViewport();
//Log("Enabling Output Compositor...");
//Ogre::CompositorManager::getSingleton().setCompositorEnabled( mainviewport , "NxCompositorOutput" , true );
//Log("Enabling Output Compositor:Done");
static bool Initialized = false;
if( !Initialized )
{
TexturePtr tester = TextureManager::getSingleton().createManual( "RTT_Texture_100",
ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME, Ogre::TEX_TYPE_2D,
32, 32, 0, Ogre::PF_BYTE_BGR, TU_RENDERTARGET );
mRenderTexture = tester->getBuffer(0,0)->getRenderTarget();
mRenderTexture->setAutoUpdated( false );
//Ogre::Viewport * NxViewport = mRenderTexture->addViewport( NxEngine::getSingleton().GetNxCamera() ); // view from main scene
Ogre::Viewport * NxViewport = mRenderTexture->addViewport( NxEngine::getSingleton().GetNxWindow()->GetViewport(0)->GetViewport()->getCamera( ) );
Initialized = true;
}
NxScreen * Output = new NxScreen( MonitorID, FullScreen, Width, Height );
MonitorListActive.push_back( Output );
return Output ;
}
Example 9: updateTexture
void CoverageMap::updateTexture()
{
// write the edit buffer into the texture's pixel buffer
HardwarePixelBufferSharedPtr buffer = mTexture->getBuffer();
PixelBox pixelBox (mWidth, mHeight, 1, getFormat(mChannels), mData);
Image::Box imageBox (0, 0, mWidth, mHeight);
buffer->blitFromMemory(pixelBox, imageBox);
}
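Example 9 uses blitFromMemory(), which copies a caller-owned PixelBox into the buffer and needs no explicit lock/unlock pair. A minimal standalone sketch of that path, assuming Ogre 1.x; the texture name, size, and pixel data are placeholders:
// Sketch of the blitFromMemory() path; names and sizes are illustrative only.
#include <vector>
const size_t w = 128, h = 128;
std::vector<Ogre::uint32> pixels(w * h, 0xFF336699u);            // CPU-side image, single ARGB value
Ogre::TexturePtr tex = Ogre::TextureManager::getSingleton().createManual(
    "BlitSketch", Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME,
    Ogre::TEX_TYPE_2D, w, h, 0, Ogre::PF_A8R8G8B8, Ogre::TU_DEFAULT);
Ogre::PixelBox src(w, h, 1, Ogre::PF_A8R8G8B8, pixels.data());   // wraps the CPU buffer, no copy yet
tex->getBuffer()->blitFromMemory(src, Ogre::Image::Box(0, 0, w, h));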
Example 10: createHalftoneTexture
bool gkOgreCompositorHelper::createHalftoneTexture()
{
using namespace Ogre;
try
{
if (TextureManager::getSingleton().resourceExists(COMP_HALFTONE_TEX_NAME))
return true; //already created
TexturePtr tex = TextureManager::getSingleton().createManual(
COMP_HALFTONE_TEX_NAME,
Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME,
TEX_TYPE_3D,
64,64,64,
0,
PF_A8
);
HardwarePixelBufferSharedPtr ptr = tex->getBuffer(0,0);
ptr->lock(HardwareBuffer::HBL_DISCARD);
const PixelBox &pb = ptr->getCurrentLock();
uint8 *data = static_cast<uint8*>(pb.data);
size_t height = pb.getHeight();
size_t width = pb.getWidth();
size_t depth = pb.getDepth();
size_t rowPitch = pb.rowPitch;
size_t slicePitch = pb.slicePitch;
for (size_t z = 0; z < depth; ++z)
{
for (size_t y = 0; y < height; ++y)
{
for(size_t x = 0; x < width; ++x)
{
float fx = 32-(float)x+0.5f;
float fy = 32-(float)y+0.5f;
float fz = 32-((float)z)/3+0.5f;
float distanceSquare = fx*fx+fy*fy+fz*fz;
data[slicePitch*z + rowPitch*y + x] = 0x00;
if (distanceSquare < 1024.0f)
data[slicePitch*z + rowPitch*y + x] += 0xFF;
}
}
}
ptr->unlock();
}
catch (Exception &e)
{
gkPrintf("[CMP] FAILED - Halftone Texture Creation. %s", e.getFullDescription().c_str());
return false;
}
return true;
}
Example 11: createMaterial
void WebView::createMaterial()
{
if(opacity > 1) opacity = 1;
else if(opacity < 0) opacity = 0;
if(!Bitwise::isPO2(viewWidth) || !Bitwise::isPO2(viewHeight))
{
if(Root::getSingleton().getRenderSystem()->getCapabilities()->hasCapability(RSC_NON_POWER_OF_2_TEXTURES))
{
if(Root::getSingleton().getRenderSystem()->getCapabilities()->getNonPOW2TexturesLimited())
compensateNPOT = true;
}
else compensateNPOT = true;
#ifdef __APPLE__
// Some Apple drivers report NPOT support but then fall back to a software path
// that renders incorrectly, so always compensate on this platform.
compensateNPOT=true;
#endif
if(compensateNPOT)
{
texWidth = Bitwise::firstPO2From(viewWidth);
texHeight = Bitwise::firstPO2From(viewHeight);
}
}
// Create the texture
#if defined(HAVE_AWESOMIUM) || !defined(__APPLE__)
TexturePtr texture = TextureManager::getSingleton().createManual(
viewName + "Texture", ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME,
TEX_TYPE_2D, texWidth, texHeight, 0, PF_BYTE_BGRA,
TU_DYNAMIC_WRITE_ONLY_DISCARDABLE, this);
HardwarePixelBufferSharedPtr pixelBuffer = texture->getBuffer();
pixelBuffer->lock(HardwareBuffer::HBL_DISCARD);
const PixelBox& pixelBox = pixelBuffer->getCurrentLock();
texDepth = Ogre::PixelUtil::getNumElemBytes(pixelBox.format);
texPitch = (pixelBox.rowPitch*texDepth);
uint8* pDest = static_cast<uint8*>(pixelBox.data);
memset(pDest, 128, texHeight*texPitch);
pixelBuffer->unlock();
#endif
MaterialPtr material = MaterialManager::getSingleton().create(viewName + "Material",
ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME);
matPass = material->getTechnique(0)->getPass(0);
matPass->setSceneBlending(SBT_TRANSPARENT_ALPHA);
matPass->setDepthWriteEnabled(false);
baseTexUnit = matPass->createTextureUnitState(viewName + "Texture");
baseTexUnit->setTextureFiltering(texFiltering, texFiltering, FO_NONE);
if(texFiltering == FO_ANISOTROPIC)
baseTexUnit->setTextureAnisotropy(4);
}
Example 12: generate
void Sample_VolumeTex::generate()
{
/* Evaluate julia fractal for each point */
Julia julia(global_real, global_imag, global_theta);
const float scale = 2.5;
const float vcut = 29.0f;
const float vscale = 1.0f/vcut;
HardwarePixelBufferSharedPtr buffer = ptex->getBuffer(0, 0);
Ogre::StringStream d;
d << "HardwarePixelBuffer " << buffer->getWidth() << " " << buffer->getHeight() << " " << buffer->getDepth();
LogManager::getSingleton().logMessage(d.str());
buffer->lock(HardwareBuffer::HBL_NORMAL);
const PixelBox &pb = buffer->getCurrentLock();
d.str("");
d << "PixelBox " << pb.getWidth() << " " << pb.getHeight() << " " << pb.getDepth() << " " << pb.rowPitch << " " << pb.slicePitch << " " << pb.data << " " << PixelUtil::getFormatName(pb.format);
LogManager::getSingleton().logMessage(d.str());
Ogre::uint32 *pbptr = static_cast<Ogre::uint32*>(pb.data);
for(size_t z=pb.front; z<pb.back; z++)
{
for(size_t y=pb.top; y<pb.bottom; y++)
{
for(size_t x=pb.left; x<pb.right; x++)
{
if(z==pb.front || z==(pb.back-1) || y==pb.top|| y==(pb.bottom-1) ||
x==pb.left || x==(pb.right-1))
{
// On border, must be zero
pbptr[x] = 0;
}
else
{
float val = julia.eval(((float)x/pb.getWidth()-0.5f) * scale,
((float)y/pb.getHeight()-0.5f) * scale,
((float)z/pb.getDepth()-0.5f) * scale);
if(val > vcut)
val = vcut;
PixelUtil::packColour((float)x/pb.getWidth(), (float)y/pb.getHeight(), (float)z/pb.getDepth(), (1.0f-(val*vscale))*0.7f, PF_A8R8G8B8, &pbptr[x]);
}
}
pbptr += pb.rowPitch;
}
pbptr += pb.getSliceSkip();
}
buffer->unlock();
}
Example 13: SaveImage
void SaveImage(TexturePtr TextureToSave, String filename)
{
HardwarePixelBufferSharedPtr readbuffer;
readbuffer = TextureToSave->getBuffer(0, 0);
readbuffer->lock(HardwareBuffer::HBL_NORMAL );
const PixelBox &readrefpb = readbuffer->getCurrentLock();
uchar *readrefdata = static_cast<uchar*>(readrefpb.data);
Image img;
img = img.loadDynamicImage (readrefdata, TextureToSave->getWidth(),
TextureToSave->getHeight(), TextureToSave->getFormat());
img.save(filename);
readbuffer->unlock();
}
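A possible call site for the helper above, shown as a hypothetical sketch; the texture and file names are placeholders. Image::save() picks the codec from the file extension, and since the buffer is only read here, HardwareBuffer::HBL_READ_ONLY would also be a suitable lock option inside SaveImage().
// Hypothetical usage; "SomeTexture" stands for any previously created, readable texture.
TexturePtr tex = TextureManager::getSingleton().getByName("SomeTexture");
if (!tex.isNull())
    SaveImage(tex, "texture_dump.png");   // the extension selects the image codec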
Example 14: setup
void OgreRTT::setup(Ogre::SceneManager *inSceneMgr, Ogre::RenderWindow *inWindow, Ogre::Camera *inCamera)
{
mSceneMgr = inSceneMgr;
mWindow = inWindow;
mCamera = inCamera;
using namespace Ogre;
TexturePtr mTexture =
TextureManager::getSingleton().createManual(
"RttTex_" + gUID,
ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME,
TEX_TYPE_2D,
mWindow->getWidth(),
mWindow->getHeight(),
0,
Ogre::PF_R8G8B8, Ogre::TU_RENDERTARGET);
mRenderTexture = mTexture->getBuffer()->getRenderTarget();
mViewport = mRenderTexture->addViewport(mCamera);
mViewport->setVisibilityMask(GfxEngine::ENTITY_MASK);
mViewport->setClearEveryFrame(true);
mViewport->setBackgroundColour(Ogre::ColourValue::Black);
mViewport->setOverlaysEnabled(false);
// Create the rectangle
mMiniScreen = new Ogre::Rectangle2D(true);
mMiniScreen->setCorners(0.5f, -0.5f, 1.0f, -1.0f);
mMiniScreen->setBoundingBox(Ogre::AxisAlignedBox(-100000.0f * Ogre::Vector3::UNIT_SCALE, 100000.0f * Ogre::Vector3::UNIT_SCALE));
mSceneNode = mSceneMgr->getRootSceneNode()->createChildSceneNode("MiniScreenNode_" + gUID);
mSceneNode->attachObject(mMiniScreen);
// Create the material
mMatPtr = Ogre::MaterialManager::getSingleton().create("RttMat_" + gUID, Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME);
mMatTechnique = mMatPtr->createTechnique();
mMatTechnique->createPass();
mMatPtr->getTechnique(0)->getPass(0)->setLightingEnabled(false);
mMatPtr->getTechnique(0)->getPass(0)->createTextureUnitState("RttTex_" + gUID);
mMiniScreen->setMaterial("RttMat_" + gUID);
++gUID;
GfxEngine::getSingletonPtr()->attachRTT(this);
}
Example 15: getMaterial
MaterialPtr Visuals::getMaterial(std::string name, int red, int green, int blue, int alpha) {
// Create the texture
TexturePtr texture = TextureManager::getSingleton().createManual(
name, // name
ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME,
TEX_TYPE_2D, // type
256, 256, // width & height
0, // number of mipmaps
PF_BYTE_BGRA, // pixel format
TU_DEFAULT); // usage; should be TU_DYNAMIC_WRITE_ONLY_DISCARDABLE for
// textures updated very often (e.g. each frame)
// Get the pixel buffer
HardwarePixelBufferSharedPtr pixelBuffer = texture->getBuffer();
// Lock the pixel buffer and get a pixel box
pixelBuffer->lock(HardwareBuffer::HBL_NORMAL); // for best performance use HBL_DISCARD!
const PixelBox& pixelBox = pixelBuffer->getCurrentLock();
uint8* pDest = static_cast<uint8*>(pixelBox.data);
// Fill in some pixel data. This will give a semi-transparent blue,
// but this is of course dependent on the chosen pixel format.
for (size_t j = 0; j < 256; j++) {
for(size_t i = 0; i < 256; i++)
{
*pDest++ = blue; // B
*pDest++ = green; // G
*pDest++ = red; // R
*pDest++ = alpha; // A
}
}
// Unlock the pixel buffer
pixelBuffer->unlock();
MaterialPtr material = MaterialManager::getSingleton().create(name, ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME);
material->getTechnique(0)->getPass(0)->createTextureUnitState(name);
material->getTechnique(0)->getPass(0)->setSceneBlending(SBT_TRANSPARENT_ALPHA);
return material;
}