This article collects and summarizes typical usage examples of the RenderDevice class in C++. If you have been wondering what the C++ RenderDevice class is for, or how to use it, the curated class examples below may help.
The following presents 15 code examples of the RenderDevice class, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
Example 1: MonitorEnumFunc
// Fallback monitor enumeration, in case a newly plugged-in monitor wasn't detected.
// Added originally for the FactoryTest app.
// New Outputs don't seem to be detected unless adapter is re-created, but that would also
// require us to re-initialize D3D11 (recreating objects, etc). This bypasses that for "fake"
// fullscreen modes.
BOOL CALLBACK MonitorEnumFunc(HMONITOR hMonitor, HDC, LPRECT, LPARAM dwData)
{
RenderDevice* renderer = (RenderDevice*)dwData;
MONITORINFOEX monitor;
monitor.cbSize = sizeof(monitor);
if (::GetMonitorInfo(hMonitor, &monitor) && monitor.szDevice[0])
{
DISPLAY_DEVICE dispDev;
memset(&dispDev, 0, sizeof(dispDev));
dispDev.cb = sizeof(dispDev);
if (::EnumDisplayDevices(monitor.szDevice, 0, &dispDev, 0))
{
if (strstr(String(dispDev.DeviceName).ToCStr(), renderer->GetParams().MonitorName.ToCStr()))
{
renderer->FSDesktopX = monitor.rcMonitor.left;
renderer->FSDesktopY = monitor.rcMonitor.top;
return FALSE;
}
}
}
return TRUE;
}
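The callback above only does something once it is handed to the Win32 enumeration API. A minimal sketch of a plausible call site, assuming it lives in a RenderDevice method (the method name UpdateMonitorOutputs is hypothetical; EnumDisplayMonitors is the standard Win32 entry point):

void RenderDevice::UpdateMonitorOutputs()
{
    // Walk all attached monitors. MonitorEnumFunc stops the walk (returns FALSE)
    // as soon as it finds the display device matching GetParams().MonitorName,
    // recording that monitor's desktop origin in FSDesktopX / FSDesktopY.
    ::EnumDisplayMonitors(NULL, NULL, MonitorEnumFunc, (LPARAM)this);
}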
Example 2: getSonicSculptureMaterial
// Helper function
static shared_ptr<UniversalMaterial> getSonicSculptureMaterial(int index) {
shared_ptr<Texture> lambertianTex = Texture::createEmpty(format("Sonic Sculpture %d", index), 512, 1, ImageFormat::RGBA16F());
static shared_ptr<Framebuffer> fb = Framebuffer::create("Sonic Sculpture Lambertian FB Clearer");
fb->set(Framebuffer::COLOR0, lambertianTex);
RenderDevice* rd = RenderDevice::current;
rd->push2D(fb); {
rd->setColorClearValue(Color3::white() * 0.9f);
rd->clear();
} rd->pop2D();
lambertianTex->generateMipMaps();
UniversalMaterial::Specification spec;
spec.setLambertian(lambertianTex);
static uint32 dummyBytes[512];
for (int i = 0; i < 512; ++i) {
dummyBytes[i] = 0xFFFFFFFF; // == 4294967295, opaque white in RGBA8
}
shared_ptr<Texture> emissiveTex = Texture::fromMemory(format("Sonic Sculpture %d Emissive", index), dummyBytes, ImageFormat::RGBA8(), 512, 1, 1, 1, ImageFormat::RGBA16F());
fb->set(Framebuffer::COLOR0, emissiveTex);
rd->push2D(fb); {
rd->setColorClearValue(Color3::black());
rd->clear();
} rd->pop2D();
emissiveTex->generateMipMaps();
spec.setEmissive(emissiveTex);
//spec.setBump(System::findDataFile("material/10538-bump.jpg"));
return UniversalMaterial::create(spec);
}
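A plausible call site (hypothetical; pieceIndex stands in for however the application numbers its sculpture pieces):

// Build the piece's material once; the emissive texture it contains is the one
// later cleared and animated by the play-pulse code in Example 4.
shared_ptr<UniversalMaterial> material = getSonicSculptureMaterial(pieceIndex);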
Example 3: RenderDevice
// Static factory: creates the device, returning NULL if the underlying device could not be initialized.
RenderDevice* RenderDevice::CreateDevice(const RendererParams& rp, void* oswnd)
{
RenderDevice* p = new RenderDevice(rp, (HWND)oswnd);
if (p)
{
if (!p->Device)
{
p->Release();
p = 0;
}
}
return p;
}
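Since CreateDevice returns NULL when the underlying device could not be created, callers are expected to check the result. A minimal sketch (rp and hwnd stand in for values the caller already has):

RenderDevice* render = RenderDevice::CreateDevice(rp, (void*)hwnd);
if (!render)
{
    // Device creation failed (e.g. no capable adapter); fall back or abort.
    return false;
}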
Example 4: handlePlayPulses
void App::handlePlayPulses() {
for (int i = m_currentPlayPulses.size() - 1; i >= 0; --i) {
int currentSampleIndex = (g_sampleWindowIndex * g_currentAudioBuffer.size());
shared_ptr<SonicSculpturePiece> piece = m_currentPlayPulses[i].piece;
int endIndex = m_currentPlayPulses[i].initialSample + (piece->size() * g_currentAudioBuffer.size());
RenderDevice* rd = RenderDevice::current;
static shared_ptr<Framebuffer> playPulseFB = Framebuffer::create("Play Pulse FB");
shared_ptr<UniversalMaterial> material = piece->material();
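// The pulse has reached the end of the piece: clear the emissive texture back
// to black and retire this pulse.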
if (currentSampleIndex >= endIndex) {
playPulseFB->set(Framebuffer::COLOR0, material->emissive().texture());
rd->push2D(playPulseFB); {
rd->setColorClearValue(Color3::black());
rd->clear();
} rd->pop2D();
material->emissive().texture()->generateMipMaps();
m_currentPlayPulses.remove(i);
continue;
}
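// Normalized position of the pulse along the piece, in [0, 1].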
float alpha = float(currentSampleIndex - m_currentPlayPulses[i].initialSample) / (endIndex - m_currentPlayPulses[i].initialSample);
playPulseFB->set(Framebuffer::COLOR0, material->emissive().texture());
rd->push2D(playPulseFB); {
Args args;
args.setUniform("pulsePos", alpha * playPulseFB->width());
args.setRect(rd->viewport());
LAUNCH_SHADER("playPulse.pix", args);
} rd->pop2D();
material->emissive().texture()->generateMipMaps();
}
}
Example 5: GetRenderDevice
void Camera::render( RenderBlock& block, bool clearView )
{
if (!activeView) return;
RenderDevice* renderDevice = GetRenderDevice();
renderDevice->setActiveView( activeView );
if (clearView)
renderDevice->clearView();
// Prepend the drawer's renderables to the block, preserving their order:
// iterate back-to-front and insert each one at the head of the block.
for (auto it = drawer.renderables.end(); it != drawer.renderables.begin(); )
{
--it;
block.renderables.Insert(block.renderables.begin(), *it);
}
renderDevice->render( block );
}
Example 6: lock
void D3DTexture::Init(const std::string& imagePath, RenderDevice& device)
{
// Capture imagePath by value: the work runs asynchronously, so a reference to
// the caller's string could dangle by the time the lambda executes.
concurrency::create_async([this, imagePath, &device]() {
CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>> converter;
{
std::lock_guard<std::recursive_mutex> lock(mMutex);
ThrowIfFailed(DirectX::CreateWICTextureFromFile(mDevice, mContext, converter.from_bytes(imagePath).c_str(), nullptr, &mColorTexture), "CreateWICTextureFromFile() failed.");
}
// Create a texture sampler
D3D11_SAMPLER_DESC samplerDesc;
ZeroMemory(&samplerDesc, sizeof(samplerDesc));
samplerDesc.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
samplerDesc.AddressU = D3D11_TEXTURE_ADDRESS_WRAP;
samplerDesc.AddressV = D3D11_TEXTURE_ADDRESS_WRAP;
samplerDesc.AddressW = D3D11_TEXTURE_ADDRESS_WRAP;
{
std::lock_guard<std::recursive_mutex> lock(mMutex);
ThrowIfFailed(mDevice->CreateSamplerState(&samplerDesc, &mColorSampler), "ID3D11Device::CreateSamplerState() failed.");
}
device.ResourceLoaded();
});
}
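A hypothetical call site (the texture object, file path, and device variable are placeholders, not names from this codebase):

D3DTexture albedo;
// Kicks off the asynchronous WIC load; the render device is notified through
// ResourceLoaded() once the texture and sampler have been created.
albedo.Init("textures/albedo.png", renderDevice);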
Example 7: GetRenderDevice
void Camera::render( RenderBlock& block, bool clearView )
{
if( !activeView ) return;
RenderDevice* renderDevice = GetRenderDevice();
renderDevice->setActiveView( activeView );
if( clearView )
renderDevice->clearView();
block.renderables.insert(
block.renderables.begin(),
drawer.renderables.begin(),
drawer.renderables.end() );
renderDevice->render( block );
}
Example 8: Render
/**
* Render this component
*/
void RenderComponent::Render(const RenderDevice& device)
{
// Set the texture resource
device.GetImmediateContext()->PSSetShaderResources(0, 1, &_texture);
// Set the vertex buffer
_mesh->Render(device);
}
Example 9: onInit
// Called before the application loop begins. Load data here and
// not in the constructor so that common exceptions will be
// automatically caught.
void App::onInit() {
RenderDevice* rd = renderDevice;
Vector2 destSize(1024, 1024);
const Rect2D& dest = Rect2D::xywh(Vector2(0, 0), destSize);
Args args;
// args.appendToPreamble("#define KERNEL_RADIUS 9\nfloat gaussCoef[KERNEL_RADIUS] = float[KERNEL_RADIUS](0.00194372, 0.00535662, 0.01289581, 0.02712094, 0.04982645, 0.07996757, 0.11211578, 0.13731514, 0.14691596);");
rd->push2D(dest); {
args.setRect(dest);
LAUNCH_SHADER("apply.*", args);
} rd->pop2D();
// Or Equivalently:
//GaussianBlur::apply(renderDevice, Texture::createEmpty("test",1024,1024));
}
Example 10: Initialize
/**
* Initialize the input layout from the current element description
* @param device The render device used to create the layout
* @param targetShader The compiled shader blob this layout is validated against
* @return true if the input layout was created successfully
*/
bool InputLayout::Initialize(const RenderDevice& device, ID3D10Blob* targetShader)
{
// Get the layout description
D3D11_INPUT_ELEMENT_DESC* layoutDescription = GetInputLayoutDesc();
// Initialize the layout
HRESULT result = device.GetD3DDevice()->CreateInputLayout(layoutDescription, _parameters.size(), targetShader->GetBufferPointer(), targetShader->GetBufferSize(), &_inputLayout);
return SUCCEEDED(result);
}
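GetInputLayoutDesc() is expected to return an array of the usual D3D11 element descriptions. A sketch of what such a description might look like for a position + texcoord vertex (the semantics and formats are illustrative, not taken from this codebase):

static const D3D11_INPUT_ELEMENT_DESC kLayout[] =
{
    { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0,
      D3D11_INPUT_PER_VERTEX_DATA, 0 },
    { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT,
      D3D11_INPUT_PER_VERTEX_DATA, 0 },
};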
Example 11: lighting
void ModelContainerView::doGraphics() {
i_App->renderDevice->clear();
RenderDevice *rd = i_App->renderDevice;
rd->setProjectionAndCameraMatrix(i_App->debugCamera);
LightingParameters lighting(GameTime(toSeconds(10, 00, AM)));
//i_SkyRef->render(rd,lighting);
rd->setAmbientLightColor(Color4(Color3::blue()));
rd->enableLighting();
GLight light = GLight::directional(i_App->debugController.getPosition() + i_App->debugController.getLookVector() * 2, Color3::white());
rd->setLight(0, light);
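// Draw every stored vertex array as an indexed line list.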
Array<std::string > keys = iTriVarTable.getKeys();
Array<std::string>::ConstIterator i = keys.begin();
while(i != keys.end()) {
VAR* var = iTriVarTable.get(*i);
Array<int> indexArray = iTriIndexTable.get(*i);
rd->beginIndexedPrimitives();
rd->setVertexArray(*var);
rd->sendIndices(RenderDevice::LINES, indexArray);
rd->endIndexedPrimitives();
++i;
}
i_App->renderDevice->disableLighting();
for(int i=0; i<gBoxArray.size(); ++i) {
AABox b = gBoxArray[i];
Draw::box(b,rd,Color3::red());
}
if(iDrawLine) {
Draw::lineSegment(LineSegment::fromTwoPoints(iPos1, iPos2), rd, iColor, 3);
if(myfound) {
Draw::lineSegment(LineSegment::fromTwoPoints(p1, p2), rd, iColor, 3);
Draw::lineSegment(LineSegment::fromTwoPoints(p2, p3), rd, iColor, 3);
Draw::lineSegment(LineSegment::fromTwoPoints(p3, p1), rd, iColor, 3);
Draw::sphere(Sphere(p4,0.5),rd, iColor);
Draw::sphere(Sphere(p5,0.5),rd, Color3::green());
}
}
}
Example 12: Apply
void MatrixStack::Apply(RenderDevice& render_device)
{
if(_state_dirty)
{
State& current_state = _states.top();
render_device.SetUniformMatrix4f("view_matrix", current_state.view_matrix);
// model_view = view * model
Mat4x4 model_view = matrix::Multiply(current_state.view_matrix, current_state.model_matrix);
render_device.SetUniformMatrix4f("model_view_matrix", model_view);
// Build our model view projection matrix
// model_view_projection = projection * view * model
Mat4x4 model_view_projection = matrix::Multiply(current_state.projection_matrix, model_view);
render_device.SetUniformMatrix4f("model_view_projection_matrix", model_view_projection);
_state_dirty = false;
}
}
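The composition order means a model-space point p reaches clip space as projection * view * model * p, and the _state_dirty guard makes it cheap to call Apply once per draw. A sketch of a plausible per-draw sequence (SetModelMatrix and DrawMesh are hypothetical; only Apply appears in the snippet above):

matrix_stack.SetModelMatrix(object_transform); // hypothetical mutator that would set _state_dirty
matrix_stack.Apply(render_device);             // re-uploads view / model_view / MVP uniforms if dirty
render_device.DrawMesh(mesh);                  // hypothetical draw call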
Example 13: Render
void Render()
{
RenderDevice* device = Environment::GetSingleton().GetRenderDevice();
SceneManager* sceneMan = Environment::GetSingleton().GetSceneManager();
device->GetScreenFrameBuffer()->Clear(CF_Color | CF_Depth, ColorRGBA::White, 1, 0);
float w = static_cast<float>( mMainWindow->GetWidth() );
float h = static_cast<float>( mMainWindow->GetHeight() );
mTessEffect->GetParameterByName("ViewportDim")->SetValue(float2(w, h));
//device->Draw(mBezierCurveEffect->GetTechniqueByName("BezierCurve"), mBezierCurveROP);
device->Draw(mTessEffect->GetTechniqueByName("TessQuad"), mTessQuadROP);
/*float4x4 world = CreateScaling(5, 5, 5) * CreateTranslation(0, 0, 60);
mTessEffect->GetParameterByName("TessLevel")->SetValue(100);
mTessEffect->GetParameterByName("World")->SetValue(world);
mTessEffect->GetParameterByName("ViewProj")->SetValue(mCamera->GetEngineViewProjMatrix());
device->Draw(mTessEffect->GetTechniqueByName("TessTeapot"), mTessTeapotROP);*/
device->GetScreenFrameBuffer()->SwapBuffers();
}
Example 14: main
int main(int argc, char** argv) {
RenderDevice* rd = new RenderDevice();
OSWindow::Settings settings;
settings.width = 960;
settings.height = 600;
rd->init(settings);
for (int i = 0; i < 100; ++i) {
drawFrame(settings.width, settings.height, i);
// Render at 30 fps
System::sleep(1.0/30.0);
// See also RenderDevice::beginFrame, RenderDevice::endFrame
rd->swapBuffers();
}
rd->cleanup();
delete rd;
return 0;
}
Example 15: OVR_UNUSED
//......... part of the code omitted here .........
case Key_PageUp:
pAdjustFunc = down ? &OculusWorldDemoApp::AdjustAspect : NULL;
AdjustDirection = 1;
break;
case Key_PageDown:
pAdjustFunc = down ? &OculusWorldDemoApp::AdjustAspect : NULL;
AdjustDirection = -1;
break;
// Distortion correction adjustments
case Key_H:
pAdjustFunc = down ? &OculusWorldDemoApp::AdjustDistortionK0 : NULL;
AdjustDirection = -1;
break;
case Key_Y:
pAdjustFunc = down ? &OculusWorldDemoApp::AdjustDistortionK0 : NULL;
AdjustDirection = 1;
break;
case Key_J:
pAdjustFunc = down ? &OculusWorldDemoApp::AdjustDistortionK1 : NULL;
AdjustDirection = -1;
break;
case Key_U:
pAdjustFunc = down ? &OculusWorldDemoApp::AdjustDistortionK1 : NULL;
AdjustDirection = 1;
break;
case Key_K:
pAdjustFunc = down ? &OculusWorldDemoApp::AdjustDistortionK2 : NULL;
AdjustDirection = -1;
break;
case Key_I:
pAdjustFunc = down ? &OculusWorldDemoApp::AdjustDistortionK2 : NULL;
AdjustDirection = 1;
break;
case Key_L:
pAdjustFunc = down ? &OculusWorldDemoApp::AdjustDistortionK3 : NULL;
AdjustDirection = -1;
break;
case Key_O:
pAdjustFunc = down ? &OculusWorldDemoApp::AdjustDistortionK3 : NULL;
AdjustDirection = 1;
break;
case Key_C:
if (down)
{
// Toggle chromatic aberration correction on/off.
RenderDevice::PostProcessShader shader = pRender->GetPostProcessShader();
if (shader == RenderDevice::PostProcessShader_Distortion)
{
pRender->SetPostProcessShader(RenderDevice::PostProcessShader_DistortionAndChromAb);
SetAdjustMessage("Chromatic Aberration Correction On");
}
else if (shader == RenderDevice::PostProcessShader_DistortionAndChromAb)
{
pRender->SetPostProcessShader(RenderDevice::PostProcessShader_Distortion);
SetAdjustMessage("Chromatic Aberration Correction Off");
}
else
OVR_ASSERT(false);
}
break;
case Key_F9:
#ifndef OVR_OS_LINUX // On Linux F9 does the same as F11.
if (!down)
{
CycleDisplay();
}
break;
#endif
#ifdef OVR_OS_MAC
case Key_F10: // F11 is reserved on Mac
#else
case Key_F11:
#endif
if (!down)
{
RenderParams = pRender->GetParams();
RenderParams.Display = DisplayId(SConfig.GetHMDInfo().DisplayDeviceName,SConfig.GetHMDInfo().DisplayId);
pRender->SetParams(RenderParams);
pPlatform->SetMouseMode(Mouse_Normal);
pPlatform->SetFullscreen(RenderParams, pRender->IsFullscreen() ? Display_Window : Display_FakeFullscreen);
pPlatform->SetMouseMode(Mouse_Relative); // Avoid a world-rotation jump when switching mouse modes.
// If using an HMD, enable post-process (for distortion) and stereo.
if(RenderParams.IsDisplaySet() && pRender->IsFullscreen())
{
SConfig.SetStereoMode(Stereo_LeftRight_Multipass);
PostProcess = PostProcess_Distortion;
}
}
break;
default:
break;
}
}
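The handler above only records pAdjustFunc and AdjustDirection; a plausible way for the per-frame update to consume them (the dt scaling and call pattern are assumptions, not shown in this excerpt):

// Somewhere in the per-frame update:
if (pAdjustFunc)
{
    // Invoke the selected adjustment through the member-function pointer,
    // signed by AdjustDirection so paired keys move the value up or down.
    (this->*pAdjustFunc)(dtSeconds * AdjustDirection);
}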